1 /* Control flow functions for trees.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
24 #include "hash-table.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
31 #include "basic-block.h"
34 #include "gimple-pretty-print.h"
35 #include "pointer-set.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
40 #include "gimple-expr.h"
43 #include "gimple-iterator.h"
44 #include "gimplify-me.h"
45 #include "gimple-walk.h"
46 #include "gimple-ssa.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-loop-manip.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-into-ssa.h"
59 #include "tree-dump.h"
60 #include "tree-pass.h"
61 #include "diagnostic-core.h"
64 #include "tree-ssa-propagate.h"
65 #include "value-prof.h"
66 #include "tree-inline.h"
68 #include "tree-ssa-live.h"
70 #include "tree-cfgcleanup.h"
72 #include "wide-int-print.h"
74 /* This file contains functions for building the Control Flow Graph (CFG)
75 for a function tree. */
77 /* Local declarations. */
79 /* Initial capacity for the basic block array. */
80 static const int initial_cfg_capacity
= 20;
82 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
83 which use a particular edge. The CASE_LABEL_EXPRs are chained together
84 via their CASE_CHAIN field, which we clear after we're done with the
85 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
87 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
88 update the case vector in response to edge redirections.
90 Right now this table is set up and torn down at key points in the
91 compilation process. It would be nice if we could make the table
92 more persistent. The key is getting notification of changes to
93 the CFG (particularly edge removal, creation and redirection). */
95 static struct pointer_map_t
*edge_to_cases
;
97 /* If we record edge_to_cases, this bitmap will hold indexes
98 of basic blocks that end in a GIMPLE_SWITCH which we touched
99 due to edge manipulations. */
101 static bitmap touched_switch_bbs
;
103 /* CFG statistics. */
/* NOTE(review): num_merged_labels appears to be a field of struct
   cfg_stats_d; the struct's opening lines are not visible in this
   extraction — verify against upstream tree-cfg.c.  */
106 long num_merged_labels
;
109 static struct cfg_stats_d cfg_stats
;
112 struct locus_discrim_map
118 /* Hashtable helpers. */
120 struct locus_discrim_hasher
: typed_free_remove
<locus_discrim_map
>
122 typedef locus_discrim_map value_type
;
123 typedef locus_discrim_map compare_type
;
124 static inline hashval_t
hash (const value_type
*);
125 static inline bool equal (const value_type
*, const compare_type
*);
128 /* Trivial hash function for a location_t. ITEM is a pointer to
129 a hash table entry that maps a location_t to a discriminator. */
/* Only the line number participates in the hash, so all locations on
   the same line collide into one entry — intentional, see equal ().  */
132 locus_discrim_hasher::hash (const value_type
*item
)
134 return LOCATION_LINE (item
->locus
);
137 /* Equality function for the locus-to-discriminator map. A and B
138 point to the two hash table entries to compare. */
/* Two entries are considered equal when their loci are on the same
   source line; column and file are deliberately ignored here.  */
141 locus_discrim_hasher::equal (const value_type
*a
, const compare_type
*b
)
143 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
/* Per-locus discriminator table; live only while build_gimple_cfg runs
   (allocated and deleted there).  */
146 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
/* Forward declarations for the static helpers defined later in this
   file, grouped by purpose.  */
148 /* Basic blocks and flowgraphs. */
149 static void make_blocks (gimple_seq
);
152 static void make_edges (void);
153 static void assign_discriminators (void);
154 static void make_cond_expr_edges (basic_block
);
155 static void make_gimple_switch_edges (basic_block
);
156 static bool make_goto_expr_edges (basic_block
);
157 static void make_gimple_asm_edges (basic_block
);
158 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
159 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
161 /* Various helpers. */
162 static inline bool stmt_starts_bb_p (gimple
, gimple
);
163 static int gimple_verify_flow_info (void);
164 static void gimple_make_forwarder_block (edge
);
165 static gimple
first_non_label_stmt (basic_block
);
166 static bool verify_gimple_transaction (gimple
);
168 /* Flowgraph optimization and cleanup. */
169 static void gimple_merge_blocks (basic_block
, basic_block
);
170 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
171 static void remove_bb (basic_block
);
172 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
173 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
174 static edge
find_taken_edge_switch_expr (basic_block
, tree
);
175 static tree
find_case_label_for_value (gimple
, tree
);
/* Initialize FN's CFG to the empty state: no profile, only the two
   fixed blocks (ENTRY and EXIT) linked directly to each other, and
   freshly allocated block/label maps of INITIAL_CFG_CAPACITY.  */
178 init_empty_tree_cfg_for_function (struct function
*fn
)
180 /* Initialize the basic block array. */
182 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
183 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
184 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
185 vec_alloc (basic_block_info_for_fn (fn
), initial_cfg_capacity
);
186 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
187 initial_cfg_capacity
);
189 /* Build a mapping of labels to their associated blocks. */
190 vec_alloc (label_to_block_map_for_fn (fn
), initial_cfg_capacity
);
191 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
192 initial_cfg_capacity
);
194 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
195 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
/* With no real blocks yet, ENTRY and EXIT are chained to each other.  */
197 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
198 = EXIT_BLOCK_PTR_FOR_FN (fn
);
199 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
200 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
/* Convenience wrapper: initialize the empty CFG for the current
   function (cfun).  */
204 init_empty_tree_cfg (void)
206 init_empty_tree_cfg_for_function (cfun
);
209 /*---------------------------------------------------------------------------
211 ---------------------------------------------------------------------------*/
213 /* Entry point to the CFG builder for trees. SEQ is the sequence of
214 statements to be added to the flowgraph. */
217 build_gimple_cfg (gimple_seq seq
)
219 /* Register specific gimple functions. */
220 gimple_register_cfg_hooks ();
222 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
224 init_empty_tree_cfg ();
228 /* Make sure there is always at least one block, even if it's empty. */
229 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
230 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
232 /* Adjust the size of the array. */
233 if (basic_block_info_for_fn (cfun
)->length ()
234 < (size_t) n_basic_blocks_for_fn (cfun
))
235 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
236 n_basic_blocks_for_fn (cfun
));
238 /* To speed up statement iterator walks, we first purge dead labels. */
239 cleanup_dead_labels ();
241 /* Group case nodes to reduce the number of edges.
242 We do this after cleaning up dead labels because otherwise we miss
243 a lot of obvious case merging opportunities. */
244 group_case_labels ();
246 /* Create the edges of the flowgraph. */
/* The discriminator table is alive only for the duration of edge
   creation; 13 is just the initial hash-table size hint.  */
247 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
249 assign_discriminators ();
250 cleanup_dead_labels ();
251 delete discriminator_per_locus
;
252 discriminator_per_locus
= NULL
;
256 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
257 them and propagate the information to the loop. We assume that the
258 annotations come immediately before the condition of the loop. */
/* After this pass no IFN_ANNOTATE calls should remain: calls attached
   to a recognizable loop set the corresponding loop flags (safelen,
   dont_vectorize, force_vectorize); any leftover annotation found in
   the second walk is warned about and dropped.  Each removed call is
   replaced by a plain assignment of its first argument to its LHS so
   downstream uses of the LHS stay valid.  */
261 replace_loop_annotate ()
265 gimple_stmt_iterator gsi
;
/* First walk: per loop, scan backwards from the loop header's closing
   GIMPLE_COND for annotations that apply to that loop.  */
268 FOR_EACH_LOOP (loop
, 0)
270 gsi
= gsi_last_bb (loop
->header
);
271 stmt
= gsi_stmt (gsi
);
272 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
274 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
276 stmt
= gsi_stmt (gsi
);
277 if (gimple_code (stmt
) != GIMPLE_CALL
)
279 if (!gimple_call_internal_p (stmt
)
280 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
/* Second argument of IFN_ANNOTATE encodes the annotation kind.  */
282 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
284 case annot_expr_ivdep_kind
:
285 loop
->safelen
= INT_MAX
;
287 case annot_expr_no_vector_kind
:
288 loop
->dont_vectorize
= true;
290 case annot_expr_vector_kind
:
291 loop
->force_vectorize
= true;
292 cfun
->has_force_vectorize_loops
= true;
/* Replace the annotation call with "lhs = arg0".  */
297 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
298 gimple_call_arg (stmt
, 0));
299 gsi_replace (&gsi
, stmt
, true);
303 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
/* Second walk: sweep every basic block for annotations that the loop
   walk above did not consume, warn, and remove them the same way.  */
304 FOR_EACH_BB_FN (bb
, cfun
)
306 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
308 stmt
= gsi_stmt (gsi
);
309 if (gimple_code (stmt
) != GIMPLE_CALL
)
311 if (!gimple_call_internal_p (stmt
)
312 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
314 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
316 case annot_expr_ivdep_kind
:
317 case annot_expr_no_vector_kind
:
318 case annot_expr_vector_kind
:
323 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
324 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
325 gimple_call_arg (stmt
, 0));
326 gsi_replace (&gsi
, stmt
, true);
/* Pass execute hook: build the CFG from the current function's GIMPLE
   body, release the flat body (the CFG now owns the statements), dump
   scope blocks when requested, then initialize the loop structures and
   fold loop annotations into them.  */
333 execute_build_cfg (void)
335 gimple_seq body
= gimple_body (current_function_decl
);
337 build_gimple_cfg (body
);
/* Statements now live in the CFG's blocks; drop the body pointer so it
   is not referenced twice.  */
338 gimple_set_body (current_function_decl
, NULL
);
339 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
341 fprintf (dump_file
, "Scope blocks:\n");
342 dump_scope_blocks (dump_file
, dump_flags
);
345 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
346 replace_loop_annotate ();
/* Static pass descriptor for the CFG-building pass.  It requires
   lowered EH (PROP_gimple_leh) and provides the CFG and loop
   structures.  */
352 const pass_data pass_data_build_cfg
=
354 GIMPLE_PASS
, /* type */
356 OPTGROUP_NONE
, /* optinfo_flags */
357 TV_TREE_CFG
, /* tv_id */
358 PROP_gimple_leh
, /* properties_required */
359 ( PROP_cfg
| PROP_loops
), /* properties_provided */
360 0, /* properties_destroyed */
361 0, /* todo_flags_start */
362 0, /* todo_flags_finish */
/* Pass wrapper class; the real work is done by execute_build_cfg.  */
365 class pass_build_cfg
: public gimple_opt_pass
368 pass_build_cfg (gcc::context
*ctxt
)
369 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
372 /* opt_pass methods: */
373 virtual unsigned int execute (function
*) { return execute_build_cfg (); }
375 }; // class pass_build_cfg
/* Factory function used by the pass manager; caller owns the returned
   pass object.  */
380 make_pass_build_cfg (gcc::context
*ctxt
)
382 return new pass_build_cfg (ctxt
);
386 /* Return true if T is a computed goto. */
/* A goto whose destination is anything other than a LABEL_DECL (e.g. a
   dereferenced label pointer) is "computed".  */
389 computed_goto_p (gimple t
)
391 return (gimple_code (t
) == GIMPLE_GOTO
392 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
395 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
396 the other edge points to a bb with just __builtin_unreachable ().
397 I.e. return true for C->M edge in:
405 __builtin_unreachable ();
/* Used to recognize assertion-style branches where the "failing" arm
   of a condition can never execute.  */
409 assert_unreachable_fallthru_edge_p (edge e
)
411 basic_block pred_bb
= e
->src
;
412 gimple last
= last_stmt (pred_bb
);
413 if (last
&& gimple_code (last
) == GIMPLE_COND
)
/* Find the successor of PRED_BB that is not E->dest.  */
415 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
416 if (other_bb
== e
->dest
)
417 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
/* The other block must be a dead end ...  */
418 if (EDGE_COUNT (other_bb
->succs
) == 0)
420 gimple_stmt_iterator gsi
= gsi_after_labels (other_bb
);
425 stmt
= gsi_stmt (gsi
);
/* ... containing, apart from debug stmts and clobbers, only a call
   to __builtin_unreachable.  */
426 while (is_gimple_debug (stmt
) || gimple_clobber_p (stmt
))
431 stmt
= gsi_stmt (gsi
);
433 return gimple_call_builtin_p (stmt
, BUILT_IN_UNREACHABLE
);
440 /* Build a flowgraph for the sequence of stmts SEQ. */
/* Walks SEQ once, splitting it into basic blocks: a new block starts
   at any statement for which stmt_starts_bb_p is true (or right after
   a block-ending statement), and a statement for which stmt_ends_bb_p
   is true terminates the current block.  */
443 make_blocks (gimple_seq seq
)
445 gimple_stmt_iterator i
= gsi_start (seq
);
447 bool start_new_block
= true;
448 bool first_stmt_of_seq
= true;
449 basic_block bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
451 while (!gsi_end_p (i
))
458 /* If the statement starts a new basic block or if we have determined
459 in a previous pass that we need to create a new block for STMT, do
461 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
463 if (!first_stmt_of_seq
)
464 gsi_split_seq_before (&i
, &seq
);
465 bb
= create_basic_block (seq
, NULL
, bb
);
466 start_new_block
= false;
469 /* Now add STMT to BB and create the subgraphs for special statement
471 gimple_set_bb (stmt
, bb
);
473 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
475 if (stmt_ends_bb_p (stmt
))
477 /* If the stmt can make abnormal goto use a new temporary
478 for the assignment to the LHS. This makes sure the old value
479 of the LHS is available on the abnormal edge. Otherwise
480 we will end up with overlapping life-ranges for abnormal
482 if (gimple_has_lhs (stmt
)
483 && stmt_can_make_abnormal_goto (stmt
)
484 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
/* Rewrite "lhs = stmt" into "tmp = stmt; lhs = tmp", with the copy
   placed after STMT (i.e. on the fallthru path only).  */
486 tree lhs
= gimple_get_lhs (stmt
);
487 tree tmp
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
488 gimple s
= gimple_build_assign (lhs
, tmp
);
489 gimple_set_location (s
, gimple_location (stmt
));
490 gimple_set_block (s
, gimple_block (stmt
));
491 gimple_set_lhs (stmt
, tmp
);
492 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
493 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
494 DECL_GIMPLE_REG_P (tmp
) = 1;
495 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
497 start_new_block
= true;
501 first_stmt_of_seq
= false;
506 /* Create and return a new empty basic block after bb AFTER. */
/* CFG-hook implementation: H is an optional gimple_seq for the new
   block, E is unused for gimple.  Grows basic_block_info by ~25% when
   full and bumps both block counters.  */
509 create_bb (void *h
, void *e
, basic_block after
)
515 /* Create and initialize a new basic block. Since alloc_block uses
516 GC allocation that clears memory to allocate a basic block, we do
517 not have to clear the newly allocated basic block here. */
520 bb
->index
= last_basic_block_for_fn (cfun
);
522 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
524 /* Add the new block to the linked list of blocks. */
525 link_block (bb
, after
);
527 /* Grow the basic block array if needed. */
528 if ((size_t) last_basic_block_for_fn (cfun
)
529 == basic_block_info_for_fn (cfun
)->length ())
532 (last_basic_block_for_fn (cfun
)
533 + (last_basic_block_for_fn (cfun
) + 3) / 4);
534 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
537 /* Add the newly created block to the array. */
538 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
540 n_basic_blocks_for_fn (cfun
)++;
541 last_basic_block_for_fn (cfun
)++;
547 /*---------------------------------------------------------------------------
549 ---------------------------------------------------------------------------*/
551 /* Fold COND_EXPR_COND of each COND_EXPR. */
/* For every block ending in a GIMPLE_COND, try to fold the condition;
   if it folds to a constant 0 or 1, rewrite the cond to a canonical
   always-false / always-true form.  Overflow warnings are deferred so
   they are only issued when the fold actually simplifies the cond.  */
554 fold_cond_expr_cond (void)
558 FOR_EACH_BB_FN (bb
, cfun
)
560 gimple stmt
= last_stmt (bb
);
562 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
564 location_t loc
= gimple_location (stmt
);
568 fold_defer_overflow_warnings ();
569 cond
= fold_binary_loc (loc
, gimple_cond_code (stmt
), boolean_type_node
,
570 gimple_cond_lhs (stmt
), gimple_cond_rhs (stmt
));
573 zerop
= integer_zerop (cond
);
574 onep
= integer_onep (cond
);
577 zerop
= onep
= false;
579 fold_undefer_overflow_warnings (zerop
|| onep
,
581 WARN_STRICT_OVERFLOW_CONDITIONAL
);
583 gimple_cond_make_false (stmt
);
585 gimple_cond_make_true (stmt
);
590 /* If basic block BB has an abnormal edge to a basic block
591 containing IFN_ABNORMAL_DISPATCHER internal call, return
592 that the dispatcher's basic block, otherwise return NULL. */
595 get_abnormal_succ_dispatcher (basic_block bb
)
/* Only pure abnormal edges qualify; abnormal EH edges are excluded by
   the mask below.  */
600 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
601 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
603 gimple_stmt_iterator gsi
604 = gsi_start_nondebug_after_labels_bb (e
->dest
);
605 gimple g
= gsi_stmt (gsi
);
607 && is_gimple_call (g
)
608 && gimple_call_internal_p (g
)
609 && gimple_call_internal_fn (g
) == IFN_ABNORMAL_DISPATCHER
)
615 /* Helper function for make_edges. Create a basic block with
616 with ABNORMAL_DISPATCHER internal call in it if needed, and
617 create abnormal edges from BBS to it and from it to FOR_BB
618 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
/* DISPATCHER_BBS caches dispatcher blocks: a {call, goto} pair, either
   one global pair or one pair per OpenMP region when BB_TO_OMP_IDX is
   non-NULL.  BBS lists the source blocks needing abnormal edges.  */
621 handle_abnormal_edges (basic_block
*dispatcher_bbs
,
622 basic_block for_bb
, int *bb_to_omp_idx
,
623 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
625 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
626 unsigned int idx
= 0;
/* With per-region dispatchers, select the pair for FOR_BB's region.  */
632 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
633 if (bb_to_omp_idx
[for_bb
->index
] != 0)
637 /* If the dispatcher has been created already, then there are basic
638 blocks with abnormal edges to it, so just make a new edge to
640 if (*dispatcher
== NULL
)
642 /* Check if there are any basic blocks that need to have
643 abnormal edges to this dispatcher. If there are none, return
645 if (bb_to_omp_idx
== NULL
)
647 if (bbs
->is_empty ())
/* With regions, only sources from FOR_BB's own region count.  */
652 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
653 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
659 /* Create the dispatcher bb. */
660 *dispatcher
= create_basic_block (NULL
, NULL
, for_bb
);
663 /* Factor computed gotos into a common computed goto site. Also
664 record the location of that site so that we can un-factor the
665 gotos after we have converted back to normal form. */
666 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
668 /* Create the destination of the factored goto. Each original
669 computed goto will put its desired destination into this
670 variable and jump to the label we create immediately below. */
671 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
673 /* Build a label for the new block which will contain the
674 factored computed goto. */
675 tree factored_label_decl
676 = create_artificial_label (UNKNOWN_LOCATION
);
677 gimple factored_computed_goto_label
678 = gimple_build_label (factored_label_decl
);
679 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
681 /* Build our new computed goto. */
682 gimple factored_computed_goto
= gimple_build_goto (var
);
683 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
/* Redirect each original computed goto: store its destination into
   VAR and fall through to the dispatcher.  */
685 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
688 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
691 gsi
= gsi_last_bb (bb
);
692 gimple last
= gsi_stmt (gsi
);
694 gcc_assert (computed_goto_p (last
));
696 /* Copy the original computed goto's destination into VAR. */
698 = gimple_build_assign (var
, gimple_goto_dest (last
));
699 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
701 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
702 e
->goto_locus
= gimple_location (last
);
703 gsi_remove (&gsi
, true);
/* Non-computed-goto case: emit an IFN_ABNORMAL_DISPATCHER call in the
   dispatcher block and wire plain abnormal edges to it.  */
708 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
709 gimple g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
711 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
712 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
714 /* Create predecessor edges of the dispatcher. */
715 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
718 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
720 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
725 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
728 /* Join all the blocks in the flowgraph. */
/* Walks every basic block and creates its outgoing edges based on the
   block's last statement (cond, switch, goto, call, EH, asm, OpenMP,
   transaction, return).  Blocks that are sources of abnormal control
   transfer (computed gotos, calls that can make abnormal gotos) are
   collected and wired up through dispatcher blocks afterwards.  */
734 struct omp_region
*cur_region
= NULL
;
735 auto_vec
<basic_block
> ab_edge_goto
;
736 auto_vec
<basic_block
> ab_edge_call
;
737 int *bb_to_omp_idx
= NULL
;
738 int cur_omp_region_idx
= 0;
740 /* Create an edge from entry to the first block with executable
742 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
743 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
746 /* Traverse the basic block array placing edges. */
747 FOR_EACH_BB_FN (bb
, cfun
)
749 gimple last
= last_stmt (bb
)
;
753 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
757 enum gimple_code code
= gimple_code (last
);
761 if (make_goto_expr_edges (bb
))
762 ab_edge_goto
.safe_push (bb
);
767 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
768 e
->goto_locus
= gimple_location (last
);
773 make_cond_expr_edges (bb
);
777 make_gimple_switch_edges (bb
);
781 make_eh_edges (last
);
784 case GIMPLE_EH_DISPATCH
:
785 fallthru
= make_eh_dispatch_edges (last
);
789 /* If this function receives a nonlocal goto, then we need to
790 make edges from this call site to all the nonlocal goto
792 if (stmt_can_make_abnormal_goto (last
))
793 ab_edge_call
.safe_push (bb
);
795 /* If this statement has reachable exception handlers, then
796 create abnormal edges to them. */
797 make_eh_edges (last
);
799 /* BUILTIN_RETURN is really a return statement. */
800 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
802 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
805 /* Some calls are known not to return. */
807 fallthru
= !(gimple_call_flags (last
) & ECF_NORETURN
);
811 /* A GIMPLE_ASSIGN may throw internally and thus be considered
813 if (is_ctrl_altering_stmt (last
))
814 make_eh_edges (last
);
819 make_gimple_asm_edges (bb
);
824 fallthru
= make_gimple_omp_edges (bb
, &cur_region
,
825 &cur_omp_region_idx
);
/* Allocate the per-block OMP region map lazily, on first entry into
   an OpenMP region.  */
826 if (cur_region
&& bb_to_omp_idx
== NULL
)
827 bb_to_omp_idx
= XCNEWVEC (int, n_basic_blocks_for_fn (cfun
));
830 case GIMPLE_TRANSACTION
:
832 tree abort_label
= gimple_transaction_label (last
);
834 make_edge (bb
, label_to_block (abort_label
), EDGE_TM_ABORT
);
840 gcc_assert (!stmt_ends_bb_p (last
));
848 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
851 /* Computed gotos are hell to deal with, especially if there are
852 lots of them with a large number of destinations. So we factor
853 them to a common computed goto location before we build the
854 edge list. After we convert back to normal form, we will un-factor
855 the computed gotos since factoring introduces an unwanted jump.
856 For non-local gotos and abnormal edges from calls to calls that return
857 twice or forced labels, factor the abnormal edges too, by having all
858 abnormal edges from the calls go to a common artificial basic block
859 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
860 basic block to all forced labels and calls returning twice.
861 We do this per-OpenMP structured block, because those regions
862 are guaranteed to be single entry single exit by the standard,
863 so it is not allowed to enter or exit such regions abnormally this way,
864 thus all computed gotos, non-local gotos and setjmp/longjmp calls
865 must not transfer control across SESE region boundaries. */
866 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
868 gimple_stmt_iterator gsi
;
869 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
870 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
871 int count
= n_basic_blocks_for_fn (cfun
);
/* With OMP regions, use one {call, goto} dispatcher pair per region
   instead of the single stack-allocated pair.  */
874 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
876 FOR_EACH_BB_FN (bb
, cfun
)
878 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
880 gimple label_stmt
= gsi_stmt (gsi
);
883 if (gimple_code (label_stmt
) != GIMPLE_LABEL
)
886 target
= gimple_label_label (label_stmt
);
888 /* Make an edge to every label block that has been marked as a
889 potential target for a computed goto or a non-local goto. */
890 if (FORCED_LABEL (target
))
891 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
892 &ab_edge_goto
, true);
893 if (DECL_NONLOCAL (target
))
895 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
896 &ab_edge_call
, false);
901 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
902 gsi_next_nondebug (&gsi
);
903 if (!gsi_end_p (gsi
))
905 /* Make an edge to every setjmp-like call. */
906 gimple call_stmt
= gsi_stmt (gsi
);
907 if (is_gimple_call (call_stmt
)
908 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
909 || gimple_call_builtin_p (call_stmt
,
910 BUILT_IN_SETJMP_RECEIVER
)))
911 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
912 &ab_edge_call
, false);
917 XDELETE (dispatcher_bbs
);
920 XDELETE (bb_to_omp_idx
);
924 /* Fold COND_EXPR_COND of each COND_EXPR. */
925 fold_cond_expr_cond ();
928 /* Find the next available discriminator value for LOCUS. The
929 discriminator distinguishes among several basic blocks that
930 share a common locus, allowing for more accurate sample-based
934 next_discriminator_for_locus (location_t locus
)
936 struct locus_discrim_map item
;
937 struct locus_discrim_map
**slot
;
/* Look up (or create) the entry for LOCUS's source line; the table is
   keyed on LOCATION_LINE only (see locus_discrim_hasher).  */
940 item
.discriminator
= 0;
941 slot
= discriminator_per_locus
->find_slot_with_hash (
942 &item
, LOCATION_LINE (locus
), INSERT
);
944 if (*slot
== HTAB_EMPTY_ENTRY
)
/* First use of this line: allocate a fresh map entry starting at 0.
   Freed via typed_free_remove when the table is deleted.  */
946 *slot
= XNEW (struct locus_discrim_map
);
948 (*slot
)->locus
= locus
;
949 (*slot
)->discriminator
= 0;
/* Pre-increment: the first discriminator handed out for a line is 1.  */
951 (*slot
)->discriminator
++;
952 return (*slot
)->discriminator
;
955 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
958 same_line_p (location_t locus1
, location_t locus2
)
960 expanded_location from
, to
;
/* Identical location_t values trivially share a line.  */
962 if (locus1
== locus2
)
965 from
= expand_location (locus1
);
966 to
= expand_location (locus2
);
968 if (from
.line
!= to
.line
)
/* Same file pointer means same file without a string compare.  */
970 if (from
.file
== to
.file
)
972 return (from
.file
!= NULL
974 && filename_cmp (from
.file
, to
.file
) == 0);
977 /* Assign discriminators to each basic block. */
/* For each block whose last statement has a known location, give a
   distinct discriminator to every successor block that starts or ends
   on the same source line, so sample-based profiling can tell the
   blocks apart.  */
980 assign_discriminators (void)
984 FOR_EACH_BB_FN (bb
, cfun
)
988 gimple last
= last_stmt (bb
);
989 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
991 if (locus
== UNKNOWN_LOCATION
)
994 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
996 gimple first
= first_non_label_stmt (e
->dest
);
997 gimple last
= last_stmt (e
->dest
);
998 if ((first
&& same_line_p (locus
, gimple_location (first
)))
999 || (last
&& same_line_p (locus
, gimple_location (last
))))
/* If the successor already has a discriminator, BB itself gets one
   instead, so the two never share the same (line, discriminator).  */
1001 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
1002 bb
->discriminator
= next_discriminator_for_locus (locus
);
1004 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
1010 /* Create the edges for a GIMPLE_COND starting at block BB. */
/* Creates the EDGE_TRUE_VALUE / EDGE_FALSE_VALUE successor edges from
   the cond's arm labels, records goto loci from the first statements
   of the target blocks, and drops the now-redundant labels from the
   cond itself.  */
1013 make_cond_expr_edges (basic_block bb
)
1015 gimple entry
= last_stmt (bb
);
1016 gimple then_stmt
, else_stmt
;
1017 basic_block then_bb
, else_bb
;
1018 tree then_label
, else_label
;
1022 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
1024 /* Entry basic blocks for each component. */
1025 then_label
= gimple_cond_true_label (entry
);
1026 else_label
= gimple_cond_false_label (entry
);
1027 then_bb
= label_to_block (then_label
);
1028 else_bb
= label_to_block (else_label
);
1029 then_stmt
= first_stmt (then_bb
);
1030 else_stmt
= first_stmt (else_bb
);
1032 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1033 e
->goto_locus
= gimple_location (then_stmt
);
1034 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1036 e
->goto_locus
= gimple_location (else_stmt
);
1038 /* We do not need the labels anymore. */
1039 gimple_cond_set_true_label (entry
, NULL_TREE
);
1040 gimple_cond_set_false_label (entry
, NULL_TREE
);
1044 /* Called for each element in the hash table (P) as we delete the
1045 edge to cases hash table.
1047 Clear all the TREE_CHAINs to prevent problems with copying of
1048 SWITCH_EXPRs and structure sharing rules, then free the hash table
/* pointer_map_traverse callback; VALUE is the head of a CASE_CHAIN
   list of CASE_LABEL_EXPRs for one edge.  */
1052 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED
, void **value
,
1053 void *data ATTRIBUTE_UNUSED
)
/* Walk the chain, saving the next link before clearing each one.  */
1057 for (t
= (tree
) *value
; t
; t
= next
)
1059 next
= CASE_CHAIN (t
);
1060 CASE_CHAIN (t
) = NULL
;
1067 /* Start recording information mapping edges to case labels. */
/* Must not be called while a recording session is already active.  */
1070 start_recording_case_labels (void)
1072 gcc_assert (edge_to_cases
== NULL
);
1073 edge_to_cases
= pointer_map_create ();
1074 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
1077 /* Return nonzero if we are recording information for case labels. */
1080 recording_case_labels_p (void)
1082 return (edge_to_cases
!= NULL
);
1085 /* Stop recording information mapping edges to case labels and
1086 remove any information we have recorded. */
/* Also re-groups case labels of every GIMPLE_SWITCH whose block was
   touched by edge manipulation during the recording session.  */
1088 end_recording_case_labels (void)
1092 pointer_map_traverse (edge_to_cases
, edge_to_cases_cleanup
, NULL
);
1093 pointer_map_destroy (edge_to_cases
);
1094 edge_to_cases
= NULL
;
1095 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1097 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
/* The block may have been removed or its switch replaced since it was
   recorded, hence the re-check of the last statement.  */
1100 gimple stmt
= last_stmt (bb
);
1101 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1102 group_case_labels_stmt (stmt
);
1105 BITMAP_FREE (touched_switch_bbs
);
1108 /* If we are inside a {start,end}_recording_cases block, then return
1109 a chain of CASE_LABEL_EXPRs from T which reference E.
1111 Otherwise return NULL. */
1114 get_cases_for_edge (edge e
, gimple t
)
1119 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1120 chains available. Return NULL so the caller can detect this case. */
1121 if (!recording_case_labels_p ())
1124 slot
= pointer_map_contains (edge_to_cases
, e
);
1126 return (tree
) *slot
;
1128 /* If we did not find E in the hash table, then this must be the first
1129 time we have been queried for information about E & T. Add all the
1130 elements from T to the hash table then perform the query again. */
1132 n
= gimple_switch_num_labels (t
);
1133 for (i
= 0; i
< n
; i
++)
1135 tree elt
= gimple_switch_label (t
, i
);
1136 tree lab
= CASE_LABEL (elt
);
1137 basic_block label_bb
= label_to_block (lab
);
1138 edge this_edge
= find_edge (e
->src
, label_bb
);
1140 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1142 slot
= pointer_map_insert (edge_to_cases
, this_edge
);
/* Prepend ELT to the edge's chain via CASE_CHAIN.  */
1143 CASE_CHAIN (elt
) = (tree
) *slot
;
/* The loop above populated entries for every edge of the switch, so E
   is guaranteed to be present now.  */
1147 return (tree
) *pointer_map_contains (edge_to_cases
, e
);
1150 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
/* One unflagged edge per case label; duplicate targets are handled by
   make_edge returning the existing edge.  */
1153 make_gimple_switch_edges (basic_block bb
)
1155 gimple entry
= last_stmt (bb
);
1158 n
= gimple_switch_num_labels (entry
);
1160 for (i
= 0; i
< n
; ++i
)
1162 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1163 basic_block label_bb
= label_to_block (lab
);
1164 make_edge (bb
, label_bb
, 0);
1169 /* Return the basic block holding label DEST. */
/* Looks DEST up in IFUN's label -> block map by LABEL_DECL_UID.  After
   a parse error an undefined label (uid < 0) is tolerated by emitting
   it into the first real block so later diagnostics stay sane.  */
1172 label_to_block_fn (struct function
*ifun
, tree dest
)
1174 int uid
= LABEL_DECL_UID (dest
);
1176 /* We would die hard when faced by an undefined label. Emit a label to
1177 the very first basic block. This will hopefully make even the dataflow
1178 and undefined variable warnings quite right. */
1179 if (seen_error () && uid
< 0)
1181 gimple_stmt_iterator gsi
=
1182 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1185 stmt
= gimple_build_label (dest
);
1186 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1187 uid
= LABEL_DECL_UID (dest
);
/* Out-of-range uid means the label has no block recorded.  */
1189 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1191 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1194 /* Create edges for a goto statement at block BB. Returns true
1195 if abnormal edges should be created. */
1198 make_goto_expr_edges (basic_block bb
)
1200 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1201 gimple goto_t
= gsi_stmt (last
);
1203 /* A simple GOTO creates normal edges. */
1204 if (simple_goto_p (goto_t
))
1206 tree dest
= gimple_goto_dest (goto_t
);
1207 basic_block label_bb
= label_to_block (dest
);
1208 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1209 e
->goto_locus
= gimple_location (goto_t
);
/* The goto itself is now redundant — the fallthru edge encodes it.  */
1210 gsi_remove (&last
, true);
1214 /* A computed GOTO creates abnormal edges. */
1214 /* A computed GOTO creates abnormal edges. */
1218 /* Create edges for an asm statement with labels at block BB. */
1221 make_gimple_asm_edges (basic_block bb
)
1223 gimple stmt
= last_stmt (bb
);
1224 int i
, n
= gimple_asm_nlabels (stmt
);
1226 for (i
= 0; i
< n
; ++i
)
1228 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1229 basic_block label_bb
= label_to_block (label
);
1230 make_edge (bb
, label_bb
, 0);
1234 /*---------------------------------------------------------------------------
1236 ---------------------------------------------------------------------------*/
1238 /* Cleanup useless labels in basic blocks. This is something we wish
1239 to do early because it allows us to group case labels before creating
1240 the edges for the CFG, and it speeds up block statement iterators in
1241 all passes later on.
1242 We rerun this pass after CFG is created, to get rid of the labels that
1243 are no longer referenced. After then we do not run it any more, since
1244 (almost) no new labels should be created. */
1246 /* A map from basic block index to the leading label of that block. */
/* NOTE(review): the label field declaration of struct label_record is
   missing from this extraction; only the 'used' member's comment is
   visible — verify against upstream.  */
1247 static struct label_record
1252 /* True if the label is referenced from somewhere. */
1256 /* Given LABEL return the first label in the same basic block. */
/* Canonicalizes LABEL to its block's leading label and marks that
   leading label as used so the cleanup pass keeps it.  */
1259 main_block_label (tree label
)
1261 basic_block bb
= label_to_block (label
);
1262 tree main_label
= label_for_bb
[bb
->index
].label
;
1264 /* label_to_block possibly inserted undefined label into the chain. */
1267 label_for_bb
[bb
->index
].label
= label
;
1271 label_for_bb
[bb
->index
].used
= true;
1278 cleanup_dead_labels_eh (void)
1285 if (cfun
->eh
== NULL
)
1288 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1289 if (lp
&& lp
->post_landing_pad
)
1291 lab
= main_block_label (lp
->post_landing_pad
);
1292 if (lab
!= lp
->post_landing_pad
)
1294 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1295 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1299 FOR_ALL_EH_REGION (r
)
1303 case ERT_MUST_NOT_THROW
:
1309 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1313 c
->label
= main_block_label (lab
);
1318 case ERT_ALLOWED_EXCEPTIONS
:
1319 lab
= r
->u
.allowed
.label
;
1321 r
->u
.allowed
.label
= main_block_label (lab
);
1327 /* Cleanup redundant labels. This is a three-step process:
1328 1) Find the leading label for each block.
1329 2) Redirect all references to labels to the leading labels.
1330 3) Cleanup all useless labels. */
1333 cleanup_dead_labels (void)
1336 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1338 /* Find a suitable label for each block. We use the first user-defined
1339 label if there is one, or otherwise just the first label we see. */
1340 FOR_EACH_BB_FN (bb
, cfun
)
1342 gimple_stmt_iterator i
;
1344 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1347 gimple stmt
= gsi_stmt (i
);
1349 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1352 label
= gimple_label_label (stmt
);
1354 /* If we have not yet seen a label for the current block,
1355 remember this one and see if there are more labels. */
1356 if (!label_for_bb
[bb
->index
].label
)
1358 label_for_bb
[bb
->index
].label
= label
;
1362 /* If we did see a label for the current block already, but it
1363 is an artificially created label, replace it if the current
1364 label is a user defined label. */
1365 if (!DECL_ARTIFICIAL (label
)
1366 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1368 label_for_bb
[bb
->index
].label
= label
;
1374 /* Now redirect all jumps/branches to the selected label.
1375 First do so for each block ending in a control statement. */
1376 FOR_EACH_BB_FN (bb
, cfun
)
1378 gimple stmt
= last_stmt (bb
);
1379 tree label
, new_label
;
1384 switch (gimple_code (stmt
))
1387 label
= gimple_cond_true_label (stmt
);
1390 new_label
= main_block_label (label
);
1391 if (new_label
!= label
)
1392 gimple_cond_set_true_label (stmt
, new_label
);
1395 label
= gimple_cond_false_label (stmt
);
1398 new_label
= main_block_label (label
);
1399 if (new_label
!= label
)
1400 gimple_cond_set_false_label (stmt
, new_label
);
1406 size_t i
, n
= gimple_switch_num_labels (stmt
);
1408 /* Replace all destination labels. */
1409 for (i
= 0; i
< n
; ++i
)
1411 tree case_label
= gimple_switch_label (stmt
, i
);
1412 label
= CASE_LABEL (case_label
);
1413 new_label
= main_block_label (label
);
1414 if (new_label
!= label
)
1415 CASE_LABEL (case_label
) = new_label
;
1422 int i
, n
= gimple_asm_nlabels (stmt
);
1424 for (i
= 0; i
< n
; ++i
)
1426 tree cons
= gimple_asm_label_op (stmt
, i
);
1427 tree label
= main_block_label (TREE_VALUE (cons
));
1428 TREE_VALUE (cons
) = label
;
1433 /* We have to handle gotos until they're removed, and we don't
1434 remove them until after we've created the CFG edges. */
1436 if (!computed_goto_p (stmt
))
1438 label
= gimple_goto_dest (stmt
);
1439 new_label
= main_block_label (label
);
1440 if (new_label
!= label
)
1441 gimple_goto_set_dest (stmt
, new_label
);
1445 case GIMPLE_TRANSACTION
:
1447 tree label
= gimple_transaction_label (stmt
);
1450 tree new_label
= main_block_label (label
);
1451 if (new_label
!= label
)
1452 gimple_transaction_set_label (stmt
, new_label
);
1462 /* Do the same for the exception region tree labels. */
1463 cleanup_dead_labels_eh ();
1465 /* Finally, purge dead labels. All user-defined labels and labels that
1466 can be the target of non-local gotos and labels which have their
1467 address taken are preserved. */
1468 FOR_EACH_BB_FN (bb
, cfun
)
1470 gimple_stmt_iterator i
;
1471 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1473 if (!label_for_this_bb
)
1476 /* If the main label of the block is unused, we may still remove it. */
1477 if (!label_for_bb
[bb
->index
].used
)
1478 label_for_this_bb
= NULL
;
1480 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1483 gimple stmt
= gsi_stmt (i
);
1485 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1488 label
= gimple_label_label (stmt
);
1490 if (label
== label_for_this_bb
1491 || !DECL_ARTIFICIAL (label
)
1492 || DECL_NONLOCAL (label
)
1493 || FORCED_LABEL (label
))
1496 gsi_remove (&i
, true);
1500 free (label_for_bb
);
1503 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1504 the ones jumping to the same label.
1505 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1508 group_case_labels_stmt (gimple stmt
)
1510 int old_size
= gimple_switch_num_labels (stmt
);
1511 int i
, j
, new_size
= old_size
;
1512 basic_block default_bb
= NULL
;
1514 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1516 /* Look for possible opportunities to merge cases. */
1518 while (i
< old_size
)
1520 tree base_case
, base_high
;
1521 basic_block base_bb
;
1523 base_case
= gimple_switch_label (stmt
, i
);
1525 gcc_assert (base_case
);
1526 base_bb
= label_to_block (CASE_LABEL (base_case
));
1528 /* Discard cases that have the same destination as the
1530 if (base_bb
== default_bb
)
1532 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1538 base_high
= CASE_HIGH (base_case
)
1539 ? CASE_HIGH (base_case
)
1540 : CASE_LOW (base_case
);
1543 /* Try to merge case labels. Break out when we reach the end
1544 of the label vector or when we cannot merge the next case
1545 label with the current one. */
1546 while (i
< old_size
)
1548 tree merge_case
= gimple_switch_label (stmt
, i
);
1549 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1550 wide_int bhp1
= wi::add (base_high
, 1);
1552 /* Merge the cases if they jump to the same place,
1553 and their ranges are consecutive. */
1554 if (merge_bb
== base_bb
1555 && wi::eq_p (CASE_LOW (merge_case
), bhp1
))
1557 base_high
= CASE_HIGH (merge_case
) ?
1558 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1559 CASE_HIGH (base_case
) = base_high
;
1560 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1569 /* Compress the case labels in the label vector, and adjust the
1570 length of the vector. */
1571 for (i
= 0, j
= 0; i
< new_size
; i
++)
1573 while (! gimple_switch_label (stmt
, j
))
1575 gimple_switch_set_label (stmt
, i
,
1576 gimple_switch_label (stmt
, j
++));
1579 gcc_assert (new_size
<= old_size
);
1580 gimple_switch_set_num_labels (stmt
, new_size
);
1583 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1584 and scan the sorted vector of cases. Combine the ones jumping to the
1588 group_case_labels (void)
1592 FOR_EACH_BB_FN (bb
, cfun
)
1594 gimple stmt
= last_stmt (bb
);
1595 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1596 group_case_labels_stmt (stmt
);
1600 /* Checks whether we can merge block B into block A. */
1603 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1606 gimple_stmt_iterator gsi
;
1608 if (!single_succ_p (a
))
1611 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1614 if (single_succ (a
) != b
)
1617 if (!single_pred_p (b
))
1620 if (b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1623 /* If A ends by a statement causing exceptions or something similar, we
1624 cannot merge the blocks. */
1625 stmt
= last_stmt (a
);
1626 if (stmt
&& stmt_ends_bb_p (stmt
))
1629 /* Do not allow a block with only a non-local label to be merged. */
1631 && gimple_code (stmt
) == GIMPLE_LABEL
1632 && DECL_NONLOCAL (gimple_label_label (stmt
)))
1635 /* Examine the labels at the beginning of B. */
1636 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1639 stmt
= gsi_stmt (gsi
);
1640 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1642 lab
= gimple_label_label (stmt
);
1644 /* Do not remove user forced labels or for -O0 any user labels. */
1645 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1649 /* Protect the loop latches. */
1650 if (current_loops
&& b
->loop_father
->latch
== b
)
1653 /* It must be possible to eliminate all phi nodes in B. If ssa form
1654 is not up-to-date and a name-mapping is registered, we cannot eliminate
1655 any phis. Symbols marked for renaming are never a problem though. */
1656 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1658 gimple phi
= gsi_stmt (gsi
);
1659 /* Technically only new names matter. */
1660 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1664 /* When not optimizing, don't merge if we'd lose goto_locus. */
1666 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1668 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1669 gimple_stmt_iterator prev
, next
;
1670 prev
= gsi_last_nondebug_bb (a
);
1671 next
= gsi_after_labels (b
);
1672 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1673 gsi_next_nondebug (&next
);
1674 if ((gsi_end_p (prev
)
1675 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1676 && (gsi_end_p (next
)
1677 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1684 /* Replaces all uses of NAME by VAL. */
1687 replace_uses_by (tree name
, tree val
)
1689 imm_use_iterator imm_iter
;
1694 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1696 /* Mark the block if we change the last stmt in it. */
1697 if (cfgcleanup_altered_bbs
1698 && stmt_ends_bb_p (stmt
))
1699 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1701 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1703 replace_exp (use
, val
);
1705 if (gimple_code (stmt
) == GIMPLE_PHI
)
1707 e
= gimple_phi_arg_edge (stmt
, PHI_ARG_INDEX_FROM_USE (use
));
1708 if (e
->flags
& EDGE_ABNORMAL
)
1710 /* This can only occur for virtual operands, since
1711 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1712 would prevent replacement. */
1713 gcc_checking_assert (virtual_operand_p (name
));
1714 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1719 if (gimple_code (stmt
) != GIMPLE_PHI
)
1721 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1722 gimple orig_stmt
= stmt
;
1725 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1726 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1727 only change sth from non-invariant to invariant, and only
1728 when propagating constants. */
1729 if (is_gimple_min_invariant (val
))
1730 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1732 tree op
= gimple_op (stmt
, i
);
1733 /* Operands may be empty here. For example, the labels
1734 of a GIMPLE_COND are nulled out following the creation
1735 of the corresponding CFG edges. */
1736 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1737 recompute_tree_invariant_for_addr_expr (op
);
1740 if (fold_stmt (&gsi
))
1741 stmt
= gsi_stmt (gsi
);
1743 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1744 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1750 gcc_checking_assert (has_zero_uses (name
));
1752 /* Also update the trees stored in loop structures. */
1757 FOR_EACH_LOOP (loop
, 0)
1759 substitute_in_loop_info (loop
, name
, val
);
1764 /* Merge block B into block A. */
1767 gimple_merge_blocks (basic_block a
, basic_block b
)
1769 gimple_stmt_iterator last
, gsi
, psi
;
1772 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1774 /* Remove all single-valued PHI nodes from block B of the form
1775 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1776 gsi
= gsi_last_bb (a
);
1777 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1779 gimple phi
= gsi_stmt (psi
);
1780 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1782 bool may_replace_uses
= (virtual_operand_p (def
)
1783 || may_propagate_copy (def
, use
));
1785 /* In case we maintain loop closed ssa form, do not propagate arguments
1786 of loop exit phi nodes. */
1788 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1789 && !virtual_operand_p (def
)
1790 && TREE_CODE (use
) == SSA_NAME
1791 && a
->loop_father
!= b
->loop_father
)
1792 may_replace_uses
= false;
1794 if (!may_replace_uses
)
1796 gcc_assert (!virtual_operand_p (def
));
1798 /* Note that just emitting the copies is fine -- there is no problem
1799 with ordering of phi nodes. This is because A is the single
1800 predecessor of B, therefore results of the phi nodes cannot
1801 appear as arguments of the phi nodes. */
1802 copy
= gimple_build_assign (def
, use
);
1803 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1804 remove_phi_node (&psi
, false);
1808 /* If we deal with a PHI for virtual operands, we can simply
1809 propagate these without fussing with folding or updating
1811 if (virtual_operand_p (def
))
1813 imm_use_iterator iter
;
1814 use_operand_p use_p
;
1817 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1818 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1819 SET_USE (use_p
, use
);
1821 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1822 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1825 replace_uses_by (def
, use
);
1827 remove_phi_node (&psi
, true);
1831 /* Ensure that B follows A. */
1832 move_block_after (b
, a
);
1834 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1835 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1837 /* Remove labels from B and set gimple_bb to A for other statements. */
1838 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
1840 gimple stmt
= gsi_stmt (gsi
);
1841 if (gimple_code (stmt
) == GIMPLE_LABEL
)
1843 tree label
= gimple_label_label (stmt
);
1846 gsi_remove (&gsi
, false);
1848 /* Now that we can thread computed gotos, we might have
1849 a situation where we have a forced label in block B
1850 However, the label at the start of block B might still be
1851 used in other ways (think about the runtime checking for
1852 Fortran assigned gotos). So we can not just delete the
1853 label. Instead we move the label to the start of block A. */
1854 if (FORCED_LABEL (label
))
1856 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
1857 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
1859 /* Other user labels keep around in a form of a debug stmt. */
1860 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
1862 gimple dbg
= gimple_build_debug_bind (label
,
1865 gimple_debug_bind_reset_value (dbg
);
1866 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
1869 lp_nr
= EH_LANDING_PAD_NR (label
);
1872 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
1873 lp
->post_landing_pad
= NULL
;
1878 gimple_set_bb (stmt
, a
);
1883 /* When merging two BBs, if their counts are different, the larger count
1884 is selected as the new bb count. This is to handle inconsistent
1886 if (a
->loop_father
== b
->loop_father
)
1888 a
->count
= MAX (a
->count
, b
->count
);
1889 a
->frequency
= MAX (a
->frequency
, b
->frequency
);
1892 /* Merge the sequences. */
1893 last
= gsi_last_bb (a
);
1894 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
1895 set_bb_seq (b
, NULL
);
1897 if (cfgcleanup_altered_bbs
)
1898 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
1902 /* Return the one of two successors of BB that is not reachable by a
1903 complex edge, if there is one. Else, return BB. We use
1904 this in optimizations that use post-dominators for their heuristics,
1905 to catch the cases in C++ where function calls are involved. */
1908 single_noncomplex_succ (basic_block bb
)
1911 if (EDGE_COUNT (bb
->succs
) != 2)
1914 e0
= EDGE_SUCC (bb
, 0);
1915 e1
= EDGE_SUCC (bb
, 1);
1916 if (e0
->flags
& EDGE_COMPLEX
)
1918 if (e1
->flags
& EDGE_COMPLEX
)
1924 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1927 notice_special_calls (gimple call
)
1929 int flags
= gimple_call_flags (call
);
1931 if (flags
& ECF_MAY_BE_ALLOCA
)
1932 cfun
->calls_alloca
= true;
1933 if (flags
& ECF_RETURNS_TWICE
)
1934 cfun
->calls_setjmp
= true;
1938 /* Clear flags set by notice_special_calls. Used by dead code removal
1939 to update the flags. */
1942 clear_special_calls (void)
1944 cfun
->calls_alloca
= false;
1945 cfun
->calls_setjmp
= false;
1948 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1951 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
1953 /* Since this block is no longer reachable, we can just delete all
1954 of its PHI nodes. */
1955 remove_phi_nodes (bb
);
1957 /* Remove edges to BB's successors. */
1958 while (EDGE_COUNT (bb
->succs
) > 0)
1959 remove_edge (EDGE_SUCC (bb
, 0));
1963 /* Remove statements of basic block BB. */
1966 remove_bb (basic_block bb
)
1968 gimple_stmt_iterator i
;
1972 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
1973 if (dump_flags
& TDF_DETAILS
)
1975 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
1976 fprintf (dump_file
, "\n");
1982 struct loop
*loop
= bb
->loop_father
;
1984 /* If a loop gets removed, clean up the information associated
1986 if (loop
->latch
== bb
1987 || loop
->header
== bb
)
1988 free_numbers_of_iterations_estimates_loop (loop
);
1991 /* Remove all the instructions in the block. */
1992 if (bb_seq (bb
) != NULL
)
1994 /* Walk backwards so as to get a chance to substitute all
1995 released DEFs into debug stmts. See
1996 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1998 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2000 gimple stmt
= gsi_stmt (i
);
2001 if (gimple_code (stmt
) == GIMPLE_LABEL
2002 && (FORCED_LABEL (gimple_label_label (stmt
))
2003 || DECL_NONLOCAL (gimple_label_label (stmt
))))
2006 gimple_stmt_iterator new_gsi
;
2008 /* A non-reachable non-local label may still be referenced.
2009 But it no longer needs to carry the extra semantics of
2011 if (DECL_NONLOCAL (gimple_label_label (stmt
)))
2013 DECL_NONLOCAL (gimple_label_label (stmt
)) = 0;
2014 FORCED_LABEL (gimple_label_label (stmt
)) = 1;
2017 new_bb
= bb
->prev_bb
;
2018 new_gsi
= gsi_start_bb (new_bb
);
2019 gsi_remove (&i
, false);
2020 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2024 /* Release SSA definitions if we are in SSA. Note that we
2025 may be called when not in SSA. For example,
2026 final_cleanup calls this function via
2027 cleanup_tree_cfg. */
2028 if (gimple_in_ssa_p (cfun
))
2029 release_defs (stmt
);
2031 gsi_remove (&i
, true);
2035 i
= gsi_last_bb (bb
);
2041 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2042 bb
->il
.gimple
.seq
= NULL
;
2043 bb
->il
.gimple
.phi_nodes
= NULL
;
2047 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2048 predicate VAL, return the edge that will be taken out of the block.
2049 If VAL does not match a unique edge, NULL is returned. */
2052 find_taken_edge (basic_block bb
, tree val
)
2056 stmt
= last_stmt (bb
);
2059 gcc_assert (is_ctrl_stmt (stmt
));
2064 if (!is_gimple_min_invariant (val
))
2067 if (gimple_code (stmt
) == GIMPLE_COND
)
2068 return find_taken_edge_cond_expr (bb
, val
);
2070 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2071 return find_taken_edge_switch_expr (bb
, val
);
2073 if (computed_goto_p (stmt
))
2075 /* Only optimize if the argument is a label, if the argument is
2076 not a label then we can not construct a proper CFG.
2078 It may be the case that we only need to allow the LABEL_REF to
2079 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2080 appear inside a LABEL_EXPR just to be safe. */
2081 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2082 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2083 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2090 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2091 statement, determine which of the outgoing edges will be taken out of the
2092 block. Return NULL if either edge may be taken. */
2095 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2100 dest
= label_to_block (val
);
2103 e
= find_edge (bb
, dest
);
2104 gcc_assert (e
!= NULL
);
2110 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2111 statement, determine which of the two edges will be taken out of the
2112 block. Return NULL if either edge may be taken. */
2115 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2117 edge true_edge
, false_edge
;
2119 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2121 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2122 return (integer_zerop (val
) ? false_edge
: true_edge
);
2125 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2126 statement, determine which edge will be taken out of the block. Return
2127 NULL if any edge may be taken. */
2130 find_taken_edge_switch_expr (basic_block bb
, tree val
)
2132 basic_block dest_bb
;
2137 switch_stmt
= last_stmt (bb
);
2138 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2139 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2141 e
= find_edge (bb
, dest_bb
);
2147 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2148 We can make optimal use here of the fact that the case labels are
2149 sorted: We can do a binary search for a case matching VAL. */
2152 find_case_label_for_value (gimple switch_stmt
, tree val
)
2154 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2155 tree default_case
= gimple_switch_default_label (switch_stmt
);
2157 for (low
= 0, high
= n
; high
- low
> 1; )
2159 size_t i
= (high
+ low
) / 2;
2160 tree t
= gimple_switch_label (switch_stmt
, i
);
2163 /* Cache the result of comparing CASE_LOW and val. */
2164 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2171 if (CASE_HIGH (t
) == NULL
)
2173 /* A singe-valued case label. */
2179 /* A case range. We can only handle integer ranges. */
2180 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2185 return default_case
;
2189 /* Dump a basic block on stderr. */
2192 gimple_debug_bb (basic_block bb
)
2194 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2198 /* Dump basic block with index N on stderr. */
2201 gimple_debug_bb_n (int n
)
2203 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2204 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2208 /* Dump the CFG on stderr.
2210 FLAGS are the same used by the tree dumping functions
2211 (see TDF_* in dumpfile.h). */
2214 gimple_debug_cfg (int flags
)
2216 gimple_dump_cfg (stderr
, flags
);
2220 /* Dump the program showing basic block boundaries on the given FILE.
2222 FLAGS are the same used by the tree dumping functions (see TDF_* in
2226 gimple_dump_cfg (FILE *file
, int flags
)
2228 if (flags
& TDF_DETAILS
)
2230 dump_function_header (file
, current_function_decl
, flags
);
2231 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2232 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2233 last_basic_block_for_fn (cfun
));
2235 brief_dump_cfg (file
, flags
| TDF_COMMENT
);
2236 fprintf (file
, "\n");
2239 if (flags
& TDF_STATS
)
2240 dump_cfg_stats (file
);
2242 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2246 /* Dump CFG statistics on FILE. */
2249 dump_cfg_stats (FILE *file
)
2251 static long max_num_merged_labels
= 0;
2252 unsigned long size
, total
= 0;
2255 const char * const fmt_str
= "%-30s%-13s%12s\n";
2256 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2257 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2258 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2259 const char *funcname
= current_function_name ();
2261 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2263 fprintf (file
, "---------------------------------------------------------\n");
2264 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2265 fprintf (file
, fmt_str
, "", " instances ", "used ");
2266 fprintf (file
, "---------------------------------------------------------\n");
2268 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2270 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2271 SCALE (size
), LABEL (size
));
2274 FOR_EACH_BB_FN (bb
, cfun
)
2275 num_edges
+= EDGE_COUNT (bb
->succs
);
2276 size
= num_edges
* sizeof (struct edge_def
);
2278 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2280 fprintf (file
, "---------------------------------------------------------\n");
2281 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2283 fprintf (file
, "---------------------------------------------------------\n");
2284 fprintf (file
, "\n");
2286 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2287 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2289 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2290 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2292 fprintf (file
, "\n");
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2305 /*---------------------------------------------------------------------------
2306 Miscellaneous helpers
2307 ---------------------------------------------------------------------------*/
2309 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2310 flow. Transfers of control flow associated with EH are excluded. */
2313 call_can_make_abnormal_goto (gimple t
)
2315 /* If the function has no non-local labels, then a call cannot make an
2316 abnormal transfer of control. */
2317 if (!cfun
->has_nonlocal_label
2318 && !cfun
->calls_setjmp
)
2321 /* Likewise if the call has no side effects. */
2322 if (!gimple_has_side_effects (t
))
2325 /* Likewise if the called function is leaf. */
2326 if (gimple_call_flags (t
) & ECF_LEAF
)
2333 /* Return true if T can make an abnormal transfer of control flow.
2334 Transfers of control flow associated with EH are excluded. */
2337 stmt_can_make_abnormal_goto (gimple t
)
2339 if (computed_goto_p (t
))
2341 if (is_gimple_call (t
))
2342 return call_can_make_abnormal_goto (t
);
2347 /* Return true if T represents a stmt that always transfers control. */
2350 is_ctrl_stmt (gimple t
)
2352 switch (gimple_code (t
))
2366 /* Return true if T is a statement that may alter the flow of control
2367 (e.g., a call to a non-returning function). */
2370 is_ctrl_altering_stmt (gimple t
)
2374 switch (gimple_code (t
))
2378 int flags
= gimple_call_flags (t
);
2380 /* A call alters control flow if it can make an abnormal goto. */
2381 if (call_can_make_abnormal_goto (t
))
2384 /* A call also alters control flow if it does not return. */
2385 if (flags
& ECF_NORETURN
)
2388 /* TM ending statements have backedges out of the transaction.
2389 Return true so we split the basic block containing them.
2390 Note that the TM_BUILTIN test is merely an optimization. */
2391 if ((flags
& ECF_TM_BUILTIN
)
2392 && is_tm_ending_fndecl (gimple_call_fndecl (t
)))
2395 /* BUILT_IN_RETURN call is same as return statement. */
2396 if (gimple_call_builtin_p (t
, BUILT_IN_RETURN
))
2401 case GIMPLE_EH_DISPATCH
:
2402 /* EH_DISPATCH branches to the individual catch handlers at
2403 this level of a try or allowed-exceptions region. It can
2404 fallthru to the next statement as well. */
2408 if (gimple_asm_nlabels (t
) > 0)
2413 /* OpenMP directives alter control flow. */
2416 case GIMPLE_TRANSACTION
:
2417 /* A transaction start alters control flow. */
2424 /* If a statement can throw, it alters control flow. */
2425 return stmt_can_throw_internal (t
);
2429 /* Return true if T is a simple local goto. */
2432 simple_goto_p (gimple t
)
2434 return (gimple_code (t
) == GIMPLE_GOTO
2435 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2439 /* Return true if STMT should start a new basic block. PREV_STMT is
2440 the statement preceding STMT. It is used when STMT is a label or a
2441 case label. Labels should only start a new basic block if their
2442 previous statement wasn't a label. Otherwise, sequence of labels
2443 would generate unnecessary basic blocks that only contain a single
2447 stmt_starts_bb_p (gimple stmt
, gimple prev_stmt
)
2452 /* Labels start a new basic block only if the preceding statement
2453 wasn't a label of the same type. This prevents the creation of
2454 consecutive blocks that have nothing but a single label. */
2455 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2457 /* Nonlocal and computed GOTO targets always start a new block. */
2458 if (DECL_NONLOCAL (gimple_label_label (stmt
))
2459 || FORCED_LABEL (gimple_label_label (stmt
)))
2462 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2464 if (DECL_NONLOCAL (gimple_label_label (prev_stmt
)))
2467 cfg_stats
.num_merged_labels
++;
2473 else if (gimple_code (stmt
) == GIMPLE_CALL
2474 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2475 /* setjmp acts similar to a nonlocal GOTO target and thus should
2476 start a new block. */
2483 /* Return true if T should end a basic block. */
2486 stmt_ends_bb_p (gimple t
)
2488 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2491 /* Remove block annotations and other data structures. */
2494 delete_tree_cfg_annotations (void)
2496 vec_free (label_to_block_map_for_fn (cfun
));
2500 /* Return the first statement in basic block BB. */
2503 first_stmt (basic_block bb
)
2505 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2508 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2516 /* Return the first non-label statement in basic block BB. */
2519 first_non_label_stmt (basic_block bb
)
2521 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2522 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2524 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2527 /* Return the last statement in basic block BB. */
2530 last_stmt (basic_block bb
)
2532 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2535 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2543 /* Return the last statement of an otherwise empty block. Return NULL
2544 if the block is totally empty, or if it contains more than one
2548 last_and_only_stmt (basic_block bb
)
2550 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2556 last
= gsi_stmt (i
);
2557 gsi_prev_nondebug (&i
);
2561 /* Empty statements should no longer appear in the instruction stream.
2562 Everything that might have appeared before should be deleted by
2563 remove_useless_stmts, and the optimizers should just gsi_remove
2564 instead of smashing with build_empty_stmt.
2566 Thus the only thing that should appear here in a block containing
2567 one executable statement is a label. */
2568 prev
= gsi_stmt (i
);
2569 if (gimple_code (prev
) == GIMPLE_LABEL
)
2575 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2578 reinstall_phi_args (edge new_edge
, edge old_edge
)
2580 edge_var_map_vector
*v
;
2583 gimple_stmt_iterator phis
;
2585 v
= redirect_edge_var_map_vector (old_edge
);
2589 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2590 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2591 i
++, gsi_next (&phis
))
2593 gimple phi
= gsi_stmt (phis
);
2594 tree result
= redirect_edge_var_map_result (vm
);
2595 tree arg
= redirect_edge_var_map_def (vm
);
2597 gcc_assert (result
== gimple_phi_result (phi
));
2599 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2602 redirect_edge_var_map_clear (old_edge
);
2605 /* Returns the basic block after which the new basic block created
2606 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2607 near its "logical" location. This is of most help to humans looking
2608 at debugging dumps. */
2611 split_edge_bb_loc (edge edge_in
)
2613 basic_block dest
= edge_in
->dest
;
2614 basic_block dest_prev
= dest
->prev_bb
;
2618 edge e
= find_edge (dest_prev
, dest
);
2619 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2620 return edge_in
->src
;
2625 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2626 Abort on abnormal edges. */
2629 gimple_split_edge (edge edge_in
)
2631 basic_block new_bb
, after_bb
, dest
;
2634 /* Abnormal edges cannot be split. */
2635 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2637 dest
= edge_in
->dest
;
2639 after_bb
= split_edge_bb_loc (edge_in
);
2641 new_bb
= create_empty_bb (after_bb
);
2642 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2643 new_bb
->count
= edge_in
->count
;
2644 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2645 new_edge
->probability
= REG_BR_PROB_BASE
;
2646 new_edge
->count
= edge_in
->count
;
2648 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2649 gcc_assert (e
== edge_in
);
2650 reinstall_phi_args (new_edge
, e
);
2656 /* Verify properties of the address expression T with base object BASE. */
2659 verify_address (tree t
, tree base
)
2662 bool old_side_effects
;
2664 bool new_side_effects
;
2666 old_constant
= TREE_CONSTANT (t
);
2667 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2669 recompute_tree_invariant_for_addr_expr (t
);
2670 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2671 new_constant
= TREE_CONSTANT (t
);
2673 if (old_constant
!= new_constant
)
2675 error ("constant not recomputed when ADDR_EXPR changed");
2678 if (old_side_effects
!= new_side_effects
)
2680 error ("side effects not recomputed when ADDR_EXPR changed");
2684 if (!(TREE_CODE (base
) == VAR_DECL
2685 || TREE_CODE (base
) == PARM_DECL
2686 || TREE_CODE (base
) == RESULT_DECL
))
2689 if (DECL_GIMPLE_REG_P (base
))
2691 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2698 /* Callback for walk_tree, check that all elements with address taken are
2699 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2700 inside a PHI node. */
2703 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2710 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2711 #define CHECK_OP(N, MSG) \
2712 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2713 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2715 switch (TREE_CODE (t
))
2718 if (SSA_NAME_IN_FREE_LIST (t
))
2720 error ("SSA name in freelist but still referenced");
2726 error ("INDIRECT_REF in gimple IL");
2730 x
= TREE_OPERAND (t
, 0);
2731 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2732 || !is_gimple_mem_ref_addr (x
))
2734 error ("invalid first operand of MEM_REF");
2737 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2738 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2740 error ("invalid offset operand of MEM_REF");
2741 return TREE_OPERAND (t
, 1);
2743 if (TREE_CODE (x
) == ADDR_EXPR
2744 && (x
= verify_address (x
, TREE_OPERAND (x
, 0))))
2750 x
= fold (ASSERT_EXPR_COND (t
));
2751 if (x
== boolean_false_node
)
2753 error ("ASSERT_EXPR with an always-false condition");
2759 error ("MODIFY_EXPR not expected while having tuples");
2766 gcc_assert (is_gimple_address (t
));
2768 /* Skip any references (they will be checked when we recurse down the
2769 tree) and ensure that any variable used as a prefix is marked
2771 for (x
= TREE_OPERAND (t
, 0);
2772 handled_component_p (x
);
2773 x
= TREE_OPERAND (x
, 0))
2776 if ((tem
= verify_address (t
, x
)))
2779 if (!(TREE_CODE (x
) == VAR_DECL
2780 || TREE_CODE (x
) == PARM_DECL
2781 || TREE_CODE (x
) == RESULT_DECL
))
2784 if (!TREE_ADDRESSABLE (x
))
2786 error ("address taken, but ADDRESSABLE bit not set");
2794 x
= COND_EXPR_COND (t
);
2795 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2797 error ("non-integral used in condition");
2800 if (!is_gimple_condexpr (x
))
2802 error ("invalid conditional operand");
2807 case NON_LVALUE_EXPR
:
2808 case TRUTH_NOT_EXPR
:
2812 case FIX_TRUNC_EXPR
:
2817 CHECK_OP (0, "invalid operand to unary operator");
2823 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2825 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2829 if (TREE_CODE (t
) == BIT_FIELD_REF
)
2831 tree t0
= TREE_OPERAND (t
, 0);
2832 tree t1
= TREE_OPERAND (t
, 1);
2833 tree t2
= TREE_OPERAND (t
, 2);
2834 if (!tree_fits_uhwi_p (t1
)
2835 || !tree_fits_uhwi_p (t2
))
2837 error ("invalid position or size operand to BIT_FIELD_REF");
2840 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
2841 && (TYPE_PRECISION (TREE_TYPE (t
))
2842 != tree_to_uhwi (t1
)))
2844 error ("integral result type precision does not match "
2845 "field size of BIT_FIELD_REF");
2848 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
2849 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
2850 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t
)))
2851 != tree_to_uhwi (t1
)))
2853 error ("mode precision of non-integral result does not "
2854 "match field size of BIT_FIELD_REF");
2857 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
2858 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
2859 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
2861 error ("position plus size exceeds size of referenced object in "
2866 t
= TREE_OPERAND (t
, 0);
2871 case ARRAY_RANGE_REF
:
2872 case VIEW_CONVERT_EXPR
:
2873 /* We have a nest of references. Verify that each of the operands
2874 that determine where to reference is either a constant or a variable,
2875 verify that the base is valid, and then show we've already checked
2877 while (handled_component_p (t
))
2879 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
2880 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2881 else if (TREE_CODE (t
) == ARRAY_REF
2882 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
2884 CHECK_OP (1, "invalid array index");
2885 if (TREE_OPERAND (t
, 2))
2886 CHECK_OP (2, "invalid array lower bound");
2887 if (TREE_OPERAND (t
, 3))
2888 CHECK_OP (3, "invalid array stride");
2890 else if (TREE_CODE (t
) == BIT_FIELD_REF
2891 || TREE_CODE (t
) == REALPART_EXPR
2892 || TREE_CODE (t
) == IMAGPART_EXPR
)
2894 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2899 t
= TREE_OPERAND (t
, 0);
2902 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
2904 error ("invalid reference prefix");
2911 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2912 POINTER_PLUS_EXPR. */
2913 if (POINTER_TYPE_P (TREE_TYPE (t
)))
2915 error ("invalid operand to plus/minus, type is a pointer");
2918 CHECK_OP (0, "invalid operand to binary operator");
2919 CHECK_OP (1, "invalid operand to binary operator");
2922 case POINTER_PLUS_EXPR
:
2923 /* Check to make sure the first operand is a pointer or reference type. */
2924 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
2926 error ("invalid operand to pointer plus, first operand is not a pointer");
2929 /* Check to make sure the second operand is a ptrofftype. */
2930 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
2932 error ("invalid operand to pointer plus, second operand is not an "
2933 "integer type of appropriate width");
2943 case UNORDERED_EXPR
:
2952 case TRUNC_DIV_EXPR
:
2954 case FLOOR_DIV_EXPR
:
2955 case ROUND_DIV_EXPR
:
2956 case TRUNC_MOD_EXPR
:
2958 case FLOOR_MOD_EXPR
:
2959 case ROUND_MOD_EXPR
:
2961 case EXACT_DIV_EXPR
:
2971 CHECK_OP (0, "invalid operand to binary operator");
2972 CHECK_OP (1, "invalid operand to binary operator");
2976 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
2980 case CASE_LABEL_EXPR
:
2983 error ("invalid CASE_CHAIN");
2997 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2998 Returns true if there is an error, otherwise false. */
3001 verify_types_in_gimple_min_lval (tree expr
)
3005 if (is_gimple_id (expr
))
3008 if (TREE_CODE (expr
) != TARGET_MEM_REF
3009 && TREE_CODE (expr
) != MEM_REF
)
3011 error ("invalid expression for min lvalue");
3015 /* TARGET_MEM_REFs are strange beasts. */
3016 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3019 op
= TREE_OPERAND (expr
, 0);
3020 if (!is_gimple_val (op
))
3022 error ("invalid operand in indirect reference");
3023 debug_generic_stmt (op
);
3026 /* Memory references now generally can involve a value conversion. */
3031 /* Verify if EXPR is a valid GIMPLE reference expression. If
3032 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3033 if there is an error, otherwise false. */
3036 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
3038 while (handled_component_p (expr
))
3040 tree op
= TREE_OPERAND (expr
, 0);
3042 if (TREE_CODE (expr
) == ARRAY_REF
3043 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3045 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3046 || (TREE_OPERAND (expr
, 2)
3047 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3048 || (TREE_OPERAND (expr
, 3)
3049 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3051 error ("invalid operands to array reference");
3052 debug_generic_stmt (expr
);
3057 /* Verify if the reference array element types are compatible. */
3058 if (TREE_CODE (expr
) == ARRAY_REF
3059 && !useless_type_conversion_p (TREE_TYPE (expr
),
3060 TREE_TYPE (TREE_TYPE (op
))))
3062 error ("type mismatch in array reference");
3063 debug_generic_stmt (TREE_TYPE (expr
));
3064 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3067 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3068 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3069 TREE_TYPE (TREE_TYPE (op
))))
3071 error ("type mismatch in array range reference");
3072 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3073 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3077 if ((TREE_CODE (expr
) == REALPART_EXPR
3078 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3079 && !useless_type_conversion_p (TREE_TYPE (expr
),
3080 TREE_TYPE (TREE_TYPE (op
))))
3082 error ("type mismatch in real/imagpart reference");
3083 debug_generic_stmt (TREE_TYPE (expr
));
3084 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3088 if (TREE_CODE (expr
) == COMPONENT_REF
3089 && !useless_type_conversion_p (TREE_TYPE (expr
),
3090 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3092 error ("type mismatch in component reference");
3093 debug_generic_stmt (TREE_TYPE (expr
));
3094 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3098 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3100 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3101 that their operand is not an SSA name or an invariant when
3102 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3103 bug). Otherwise there is nothing to verify, gross mismatches at
3104 most invoke undefined behavior. */
3106 && (TREE_CODE (op
) == SSA_NAME
3107 || is_gimple_min_invariant (op
)))
3109 error ("conversion of an SSA_NAME on the left hand side");
3110 debug_generic_stmt (expr
);
3113 else if (TREE_CODE (op
) == SSA_NAME
3114 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3116 error ("conversion of register to a different size");
3117 debug_generic_stmt (expr
);
3120 else if (!handled_component_p (op
))
3127 if (TREE_CODE (expr
) == MEM_REF
)
3129 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3131 error ("invalid address operand in MEM_REF");
3132 debug_generic_stmt (expr
);
3135 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3136 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3138 error ("invalid offset operand in MEM_REF");
3139 debug_generic_stmt (expr
);
3143 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3145 if (!TMR_BASE (expr
)
3146 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3148 error ("invalid address operand in TARGET_MEM_REF");
3151 if (!TMR_OFFSET (expr
)
3152 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3153 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3155 error ("invalid offset operand in TARGET_MEM_REF");
3156 debug_generic_stmt (expr
);
3161 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3162 && verify_types_in_gimple_min_lval (expr
));
3165 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3166 list of pointer-to types that is trivially convertible to DEST. */
3169 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3173 if (!TYPE_POINTER_TO (src_obj
))
3176 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3177 if (useless_type_conversion_p (dest
, src
))
3183 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3184 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3187 valid_fixed_convert_types_p (tree type1
, tree type2
)
3189 return (FIXED_POINT_TYPE_P (type1
)
3190 && (INTEGRAL_TYPE_P (type2
)
3191 || SCALAR_FLOAT_TYPE_P (type2
)
3192 || FIXED_POINT_TYPE_P (type2
)));
3195 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3196 is a problem, otherwise false. */
3199 verify_gimple_call (gimple stmt
)
3201 tree fn
= gimple_call_fn (stmt
);
3202 tree fntype
, fndecl
;
3205 if (gimple_call_internal_p (stmt
))
3209 error ("gimple call has two targets");
3210 debug_generic_stmt (fn
);
3218 error ("gimple call has no target");
3223 if (fn
&& !is_gimple_call_addr (fn
))
3225 error ("invalid function in gimple call");
3226 debug_generic_stmt (fn
);
3231 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3232 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3233 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3235 error ("non-function in gimple call");
3239 fndecl
= gimple_call_fndecl (stmt
);
3241 && TREE_CODE (fndecl
) == FUNCTION_DECL
3242 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3243 && !DECL_PURE_P (fndecl
)
3244 && !TREE_READONLY (fndecl
))
3246 error ("invalid pure const state for function");
3250 if (gimple_call_lhs (stmt
)
3251 && (!is_gimple_lvalue (gimple_call_lhs (stmt
))
3252 || verify_types_in_gimple_reference (gimple_call_lhs (stmt
), true)))
3254 error ("invalid LHS in gimple call");
3258 if (gimple_call_lhs (stmt
) && gimple_call_noreturn_p (stmt
))
3260 error ("LHS in noreturn call");
3264 fntype
= gimple_call_fntype (stmt
);
3266 && gimple_call_lhs (stmt
)
3267 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt
)),
3269 /* ??? At least C++ misses conversions at assignments from
3270 void * call results.
3271 ??? Java is completely off. Especially with functions
3272 returning java.lang.Object.
3273 For now simply allow arbitrary pointer type conversions. */
3274 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt
)))
3275 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3277 error ("invalid conversion in gimple call");
3278 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt
)));
3279 debug_generic_stmt (TREE_TYPE (fntype
));
3283 if (gimple_call_chain (stmt
)
3284 && !is_gimple_val (gimple_call_chain (stmt
)))
3286 error ("invalid static chain in gimple call");
3287 debug_generic_stmt (gimple_call_chain (stmt
));
3291 /* If there is a static chain argument, this should not be an indirect
3292 call, and the decl should have DECL_STATIC_CHAIN set. */
3293 if (gimple_call_chain (stmt
))
3295 if (!gimple_call_fndecl (stmt
))
3297 error ("static chain in indirect gimple call");
3300 fn
= TREE_OPERAND (fn
, 0);
3302 if (!DECL_STATIC_CHAIN (fn
))
3304 error ("static chain with function that doesn%'t use one");
3309 /* ??? The C frontend passes unpromoted arguments in case it
3310 didn't see a function declaration before the call. So for now
3311 leave the call arguments mostly unverified. Once we gimplify
3312 unit-at-a-time we have a chance to fix this. */
3314 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3316 tree arg
= gimple_call_arg (stmt
, i
);
3317 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3318 && !is_gimple_val (arg
))
3319 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3320 && !is_gimple_lvalue (arg
)))
3322 error ("invalid argument to gimple call");
3323 debug_generic_expr (arg
);
3331 /* Verifies the gimple comparison with the result type TYPE and
3332 the operands OP0 and OP1. */
3335 verify_gimple_comparison (tree type
, tree op0
, tree op1
)
3337 tree op0_type
= TREE_TYPE (op0
);
3338 tree op1_type
= TREE_TYPE (op1
);
3340 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3342 error ("invalid operands in gimple comparison");
3346 /* For comparisons we do not have the operations type as the
3347 effective type the comparison is carried out in. Instead
3348 we require that either the first operand is trivially
3349 convertible into the second, or the other way around.
3350 Because we special-case pointers to void we allow
3351 comparisons of pointers with the same mode as well. */
3352 if (!useless_type_conversion_p (op0_type
, op1_type
)
3353 && !useless_type_conversion_p (op1_type
, op0_type
)
3354 && (!POINTER_TYPE_P (op0_type
)
3355 || !POINTER_TYPE_P (op1_type
)
3356 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3358 error ("mismatching comparison operand types");
3359 debug_generic_expr (op0_type
);
3360 debug_generic_expr (op1_type
);
3364 /* The resulting type of a comparison may be an effective boolean type. */
3365 if (INTEGRAL_TYPE_P (type
)
3366 && (TREE_CODE (type
) == BOOLEAN_TYPE
3367 || TYPE_PRECISION (type
) == 1))
3369 if (TREE_CODE (op0_type
) == VECTOR_TYPE
3370 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3372 error ("vector comparison returning a boolean");
3373 debug_generic_expr (op0_type
);
3374 debug_generic_expr (op1_type
);
3378 /* Or an integer vector type with the same size and element count
3379 as the comparison operand types. */
3380 else if (TREE_CODE (type
) == VECTOR_TYPE
3381 && TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
)
3383 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3384 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3386 error ("non-vector operands in vector comparison");
3387 debug_generic_expr (op0_type
);
3388 debug_generic_expr (op1_type
);
3392 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
)
3393 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type
)))
3394 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type
))))
3395 /* The result of a vector comparison is of signed
3397 || TYPE_UNSIGNED (TREE_TYPE (type
)))
3399 error ("invalid vector comparison resulting type");
3400 debug_generic_expr (type
);
3406 error ("bogus comparison result type");
3407 debug_generic_expr (type
);
3414 /* Verify a gimple assignment statement STMT with an unary rhs.
3415 Returns true if anything is wrong. */
3418 verify_gimple_assign_unary (gimple stmt
)
3420 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3421 tree lhs
= gimple_assign_lhs (stmt
);
3422 tree lhs_type
= TREE_TYPE (lhs
);
3423 tree rhs1
= gimple_assign_rhs1 (stmt
);
3424 tree rhs1_type
= TREE_TYPE (rhs1
);
3426 if (!is_gimple_reg (lhs
))
3428 error ("non-register as LHS of unary operation");
3432 if (!is_gimple_val (rhs1
))
3434 error ("invalid operand in unary operation");
3438 /* First handle conversions. */
3443 /* Allow conversions from pointer type to integral type only if
3444 there is no sign or zero extension involved.
3445 For targets were the precision of ptrofftype doesn't match that
3446 of pointers we need to allow arbitrary conversions to ptrofftype. */
3447 if ((POINTER_TYPE_P (lhs_type
)
3448 && INTEGRAL_TYPE_P (rhs1_type
))
3449 || (POINTER_TYPE_P (rhs1_type
)
3450 && INTEGRAL_TYPE_P (lhs_type
)
3451 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3452 || ptrofftype_p (sizetype
))))
3455 /* Allow conversion from integral to offset type and vice versa. */
3456 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3457 && INTEGRAL_TYPE_P (rhs1_type
))
3458 || (INTEGRAL_TYPE_P (lhs_type
)
3459 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3462 /* Otherwise assert we are converting between types of the
3464 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3466 error ("invalid types in nop conversion");
3467 debug_generic_expr (lhs_type
);
3468 debug_generic_expr (rhs1_type
);
3475 case ADDR_SPACE_CONVERT_EXPR
:
3477 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3478 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3479 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3481 error ("invalid types in address space conversion");
3482 debug_generic_expr (lhs_type
);
3483 debug_generic_expr (rhs1_type
);
3490 case FIXED_CONVERT_EXPR
:
3492 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3493 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3495 error ("invalid types in fixed-point conversion");
3496 debug_generic_expr (lhs_type
);
3497 debug_generic_expr (rhs1_type
);
3506 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3507 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3508 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3510 error ("invalid types in conversion to floating point");
3511 debug_generic_expr (lhs_type
);
3512 debug_generic_expr (rhs1_type
);
3519 case FIX_TRUNC_EXPR
:
3521 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3522 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3523 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3525 error ("invalid types in conversion to integer");
3526 debug_generic_expr (lhs_type
);
3527 debug_generic_expr (rhs1_type
);
3534 case VEC_UNPACK_HI_EXPR
:
3535 case VEC_UNPACK_LO_EXPR
:
3536 case REDUC_MAX_EXPR
:
3537 case REDUC_MIN_EXPR
:
3538 case REDUC_PLUS_EXPR
:
3539 case VEC_UNPACK_FLOAT_HI_EXPR
:
3540 case VEC_UNPACK_FLOAT_LO_EXPR
:
3548 case NON_LVALUE_EXPR
:
3556 /* For the remaining codes assert there is no conversion involved. */
3557 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3559 error ("non-trivial conversion in unary operation");
3560 debug_generic_expr (lhs_type
);
3561 debug_generic_expr (rhs1_type
);
3568 /* Verify a gimple assignment statement STMT with a binary rhs.
3569 Returns true if anything is wrong. */
3572 verify_gimple_assign_binary (gimple stmt
)
3574 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3575 tree lhs
= gimple_assign_lhs (stmt
);
3576 tree lhs_type
= TREE_TYPE (lhs
);
3577 tree rhs1
= gimple_assign_rhs1 (stmt
);
3578 tree rhs1_type
= TREE_TYPE (rhs1
);
3579 tree rhs2
= gimple_assign_rhs2 (stmt
);
3580 tree rhs2_type
= TREE_TYPE (rhs2
);
3582 if (!is_gimple_reg (lhs
))
3584 error ("non-register as LHS of binary operation");
3588 if (!is_gimple_val (rhs1
)
3589 || !is_gimple_val (rhs2
))
3591 error ("invalid operands in binary operation");
3595 /* First handle operations that involve different types. */
3600 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3601 || !(INTEGRAL_TYPE_P (rhs1_type
)
3602 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3603 || !(INTEGRAL_TYPE_P (rhs2_type
)
3604 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3606 error ("type mismatch in complex expression");
3607 debug_generic_expr (lhs_type
);
3608 debug_generic_expr (rhs1_type
);
3609 debug_generic_expr (rhs2_type
);
3621 /* Shifts and rotates are ok on integral types, fixed point
3622 types and integer vector types. */
3623 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3624 && !FIXED_POINT_TYPE_P (rhs1_type
)
3625 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3626 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3627 || (!INTEGRAL_TYPE_P (rhs2_type
)
3628 /* Vector shifts of vectors are also ok. */
3629 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3630 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3631 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3632 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3633 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3635 error ("type mismatch in shift expression");
3636 debug_generic_expr (lhs_type
);
3637 debug_generic_expr (rhs1_type
);
3638 debug_generic_expr (rhs2_type
);
3645 case VEC_LSHIFT_EXPR
:
3646 case VEC_RSHIFT_EXPR
:
3648 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3649 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3650 || POINTER_TYPE_P (TREE_TYPE (rhs1_type
))
3651 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type
))
3652 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
)))
3653 || (!INTEGRAL_TYPE_P (rhs2_type
)
3654 && (TREE_CODE (rhs2_type
) != VECTOR_TYPE
3655 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3656 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3658 error ("type mismatch in vector shift expression");
3659 debug_generic_expr (lhs_type
);
3660 debug_generic_expr (rhs1_type
);
3661 debug_generic_expr (rhs2_type
);
3664 /* For shifting a vector of non-integral components we
3665 only allow shifting by a constant multiple of the element size. */
3666 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3667 && (TREE_CODE (rhs2
) != INTEGER_CST
3668 || !div_if_zero_remainder (rhs2
,
3669 TYPE_SIZE (TREE_TYPE (rhs1_type
)))))
3671 error ("non-element sized vector shift of floating point vector");
3678 case WIDEN_LSHIFT_EXPR
:
3680 if (!INTEGRAL_TYPE_P (lhs_type
)
3681 || !INTEGRAL_TYPE_P (rhs1_type
)
3682 || TREE_CODE (rhs2
) != INTEGER_CST
3683 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3685 error ("type mismatch in widening vector shift expression");
3686 debug_generic_expr (lhs_type
);
3687 debug_generic_expr (rhs1_type
);
3688 debug_generic_expr (rhs2_type
);
3695 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3696 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3698 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3699 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3700 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3701 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3702 || TREE_CODE (rhs2
) != INTEGER_CST
3703 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3704 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3706 error ("type mismatch in widening vector shift expression");
3707 debug_generic_expr (lhs_type
);
3708 debug_generic_expr (rhs1_type
);
3709 debug_generic_expr (rhs2_type
);
3719 tree lhs_etype
= lhs_type
;
3720 tree rhs1_etype
= rhs1_type
;
3721 tree rhs2_etype
= rhs2_type
;
3722 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3724 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3725 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3727 error ("invalid non-vector operands to vector valued plus");
3730 lhs_etype
= TREE_TYPE (lhs_type
);
3731 rhs1_etype
= TREE_TYPE (rhs1_type
);
3732 rhs2_etype
= TREE_TYPE (rhs2_type
);
3734 if (POINTER_TYPE_P (lhs_etype
)
3735 || POINTER_TYPE_P (rhs1_etype
)
3736 || POINTER_TYPE_P (rhs2_etype
))
3738 error ("invalid (pointer) operands to plus/minus");
3742 /* Continue with generic binary expression handling. */
3746 case POINTER_PLUS_EXPR
:
3748 if (!POINTER_TYPE_P (rhs1_type
)
3749 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3750 || !ptrofftype_p (rhs2_type
))
3752 error ("type mismatch in pointer plus expression");
3753 debug_generic_stmt (lhs_type
);
3754 debug_generic_stmt (rhs1_type
);
3755 debug_generic_stmt (rhs2_type
);
3762 case TRUTH_ANDIF_EXPR
:
3763 case TRUTH_ORIF_EXPR
:
3764 case TRUTH_AND_EXPR
:
3766 case TRUTH_XOR_EXPR
:
3776 case UNORDERED_EXPR
:
3784 /* Comparisons are also binary, but the result type is not
3785 connected to the operand types. */
3786 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
);
3788 case WIDEN_MULT_EXPR
:
3789 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3791 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3792 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3794 case WIDEN_SUM_EXPR
:
3795 case VEC_WIDEN_MULT_HI_EXPR
:
3796 case VEC_WIDEN_MULT_LO_EXPR
:
3797 case VEC_WIDEN_MULT_EVEN_EXPR
:
3798 case VEC_WIDEN_MULT_ODD_EXPR
:
3799 case VEC_PACK_TRUNC_EXPR
:
3800 case VEC_PACK_SAT_EXPR
:
3801 case VEC_PACK_FIX_TRUNC_EXPR
:
3806 case MULT_HIGHPART_EXPR
:
3807 case TRUNC_DIV_EXPR
:
3809 case FLOOR_DIV_EXPR
:
3810 case ROUND_DIV_EXPR
:
3811 case TRUNC_MOD_EXPR
:
3813 case FLOOR_MOD_EXPR
:
3814 case ROUND_MOD_EXPR
:
3816 case EXACT_DIV_EXPR
:
3822 /* Continue with generic binary expression handling. */
3829 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3830 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3832 error ("type mismatch in binary expression");
3833 debug_generic_stmt (lhs_type
);
3834 debug_generic_stmt (rhs1_type
);
3835 debug_generic_stmt (rhs2_type
);
3842 /* Verify a gimple assignment statement STMT with a ternary rhs.
3843 Returns true if anything is wrong. */
3846 verify_gimple_assign_ternary (gimple stmt
)
3848 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3849 tree lhs
= gimple_assign_lhs (stmt
);
3850 tree lhs_type
= TREE_TYPE (lhs
);
3851 tree rhs1
= gimple_assign_rhs1 (stmt
);
3852 tree rhs1_type
= TREE_TYPE (rhs1
);
3853 tree rhs2
= gimple_assign_rhs2 (stmt
);
3854 tree rhs2_type
= TREE_TYPE (rhs2
);
3855 tree rhs3
= gimple_assign_rhs3 (stmt
);
3856 tree rhs3_type
= TREE_TYPE (rhs3
);
3858 if (!is_gimple_reg (lhs
))
3860 error ("non-register as LHS of ternary operation");
3864 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
3865 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
3866 || !is_gimple_val (rhs2
)
3867 || !is_gimple_val (rhs3
))
3869 error ("invalid operands in ternary operation");
3873 /* First handle operations that involve different types. */
3876 case WIDEN_MULT_PLUS_EXPR
:
3877 case WIDEN_MULT_MINUS_EXPR
:
3878 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3879 && !FIXED_POINT_TYPE_P (rhs1_type
))
3880 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
3881 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
3882 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
3883 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
3885 error ("type mismatch in widening multiply-accumulate expression");
3886 debug_generic_expr (lhs_type
);
3887 debug_generic_expr (rhs1_type
);
3888 debug_generic_expr (rhs2_type
);
3889 debug_generic_expr (rhs3_type
);
3895 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3896 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
3897 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3899 error ("type mismatch in fused multiply-add expression");
3900 debug_generic_expr (lhs_type
);
3901 debug_generic_expr (rhs1_type
);
3902 debug_generic_expr (rhs2_type
);
3903 debug_generic_expr (rhs3_type
);
3910 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
3911 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3913 error ("type mismatch in conditional expression");
3914 debug_generic_expr (lhs_type
);
3915 debug_generic_expr (rhs2_type
);
3916 debug_generic_expr (rhs3_type
);
3922 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3923 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3925 error ("type mismatch in vector permute expression");
3926 debug_generic_expr (lhs_type
);
3927 debug_generic_expr (rhs1_type
);
3928 debug_generic_expr (rhs2_type
);
3929 debug_generic_expr (rhs3_type
);
3933 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3934 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
3935 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
3937 error ("vector types expected in vector permute expression");
3938 debug_generic_expr (lhs_type
);
3939 debug_generic_expr (rhs1_type
);
3940 debug_generic_expr (rhs2_type
);
3941 debug_generic_expr (rhs3_type
);
3945 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
3946 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
3947 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
3948 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
3949 != TYPE_VECTOR_SUBPARTS (lhs_type
))
3951 error ("vectors with different element number found "
3952 "in vector permute expression");
3953 debug_generic_expr (lhs_type
);
3954 debug_generic_expr (rhs1_type
);
3955 debug_generic_expr (rhs2_type
);
3956 debug_generic_expr (rhs3_type
);
3960 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
3961 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
3962 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
3964 error ("invalid mask type in vector permute expression");
3965 debug_generic_expr (lhs_type
);
3966 debug_generic_expr (rhs1_type
);
3967 debug_generic_expr (rhs2_type
);
3968 debug_generic_expr (rhs3_type
);
3975 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
)
3976 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
3977 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
3978 (TYPE_MODE (TREE_TYPE (rhs1_type
))))
3979 > GET_MODE_BITSIZE (GET_MODE_INNER
3980 (TYPE_MODE (TREE_TYPE (lhs_type
)))))
3982 error ("type mismatch in sad expression");
3983 debug_generic_expr (lhs_type
);
3984 debug_generic_expr (rhs1_type
);
3985 debug_generic_expr (rhs2_type
);
3986 debug_generic_expr (rhs3_type
);
3990 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3991 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
3992 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
3994 error ("vector types expected in sad expression");
3995 debug_generic_expr (lhs_type
);
3996 debug_generic_expr (rhs1_type
);
3997 debug_generic_expr (rhs2_type
);
3998 debug_generic_expr (rhs3_type
);
4005 case REALIGN_LOAD_EXPR
:
4015 /* Verify a gimple assignment statement STMT with a single rhs.
4016 Returns true if anything is wrong. */
4019 verify_gimple_assign_single (gimple stmt
)
4021 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4022 tree lhs
= gimple_assign_lhs (stmt
);
4023 tree lhs_type
= TREE_TYPE (lhs
);
4024 tree rhs1
= gimple_assign_rhs1 (stmt
);
4025 tree rhs1_type
= TREE_TYPE (rhs1
);
4028 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4030 error ("non-trivial conversion at assignment");
4031 debug_generic_expr (lhs_type
);
4032 debug_generic_expr (rhs1_type
);
4036 if (gimple_clobber_p (stmt
)
4037 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4039 error ("non-decl/MEM_REF LHS in clobber statement");
4040 debug_generic_expr (lhs
);
4044 if (handled_component_p (lhs
)
4045 || TREE_CODE (lhs
) == MEM_REF
4046 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4047 res
|= verify_types_in_gimple_reference (lhs
, true);
4049 /* Special codes we cannot handle via their class. */
4054 tree op
= TREE_OPERAND (rhs1
, 0);
4055 if (!is_gimple_addressable (op
))
4057 error ("invalid operand in unary expression");
4061 /* Technically there is no longer a need for matching types, but
4062 gimple hygiene asks for this check. In LTO we can end up
4063 combining incompatible units and thus end up with addresses
4064 of globals that change their type to a common one. */
4066 && !types_compatible_p (TREE_TYPE (op
),
4067 TREE_TYPE (TREE_TYPE (rhs1
)))
4068 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4071 error ("type mismatch in address expression");
4072 debug_generic_stmt (TREE_TYPE (rhs1
));
4073 debug_generic_stmt (TREE_TYPE (op
));
4077 return verify_types_in_gimple_reference (op
, true);
4082 error ("INDIRECT_REF in gimple IL");
4088 case ARRAY_RANGE_REF
:
4089 case VIEW_CONVERT_EXPR
:
4092 case TARGET_MEM_REF
:
4094 if (!is_gimple_reg (lhs
)
4095 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4097 error ("invalid rhs for gimple memory store");
4098 debug_generic_stmt (lhs
);
4099 debug_generic_stmt (rhs1
);
4102 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4114 /* tcc_declaration */
4119 if (!is_gimple_reg (lhs
)
4120 && !is_gimple_reg (rhs1
)
4121 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4123 error ("invalid rhs for gimple memory store");
4124 debug_generic_stmt (lhs
);
4125 debug_generic_stmt (rhs1
);
4131 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
4134 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4136 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4138 /* For vector CONSTRUCTORs we require that either it is empty
4139 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4140 (then the element count must be correct to cover the whole
4141 outer vector and index must be NULL on all elements, or it is
4142 a CONSTRUCTOR of scalar elements, where we as an exception allow
4143 smaller number of elements (assuming zero filling) and
4144 consecutive indexes as compared to NULL indexes (such
4145 CONSTRUCTORs can appear in the IL from FEs). */
4146 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4148 if (elt_t
== NULL_TREE
)
4150 elt_t
= TREE_TYPE (elt_v
);
4151 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4153 tree elt_t
= TREE_TYPE (elt_v
);
4154 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4157 error ("incorrect type of vector CONSTRUCTOR"
4159 debug_generic_stmt (rhs1
);
4162 else if (CONSTRUCTOR_NELTS (rhs1
)
4163 * TYPE_VECTOR_SUBPARTS (elt_t
)
4164 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4166 error ("incorrect number of vector CONSTRUCTOR"
4168 debug_generic_stmt (rhs1
);
4172 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4175 error ("incorrect type of vector CONSTRUCTOR elements");
4176 debug_generic_stmt (rhs1
);
4179 else if (CONSTRUCTOR_NELTS (rhs1
)
4180 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4182 error ("incorrect number of vector CONSTRUCTOR elements");
4183 debug_generic_stmt (rhs1
);
4187 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4189 error ("incorrect type of vector CONSTRUCTOR elements");
4190 debug_generic_stmt (rhs1
);
4193 if (elt_i
!= NULL_TREE
4194 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4195 || TREE_CODE (elt_i
) != INTEGER_CST
4196 || compare_tree_int (elt_i
, i
) != 0))
4198 error ("vector CONSTRUCTOR with non-NULL element index");
4199 debug_generic_stmt (rhs1
);
4207 case WITH_SIZE_EXPR
:
4217 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4218 is a problem, otherwise false. */
4221 verify_gimple_assign (gimple stmt
)
4223 switch (gimple_assign_rhs_class (stmt
))
4225 case GIMPLE_SINGLE_RHS
:
4226 return verify_gimple_assign_single (stmt
);
4228 case GIMPLE_UNARY_RHS
:
4229 return verify_gimple_assign_unary (stmt
);
4231 case GIMPLE_BINARY_RHS
:
4232 return verify_gimple_assign_binary (stmt
);
4234 case GIMPLE_TERNARY_RHS
:
4235 return verify_gimple_assign_ternary (stmt
);
4242 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4243 is a problem, otherwise false. */
4246 verify_gimple_return (gimple stmt
)
4248 tree op
= gimple_return_retval (stmt
);
4249 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4251 /* We cannot test for present return values as we do not fix up missing
4252 return values from the original source. */
4256 if (!is_gimple_val (op
)
4257 && TREE_CODE (op
) != RESULT_DECL
)
4259 error ("invalid operand in return statement");
4260 debug_generic_stmt (op
);
4264 if ((TREE_CODE (op
) == RESULT_DECL
4265 && DECL_BY_REFERENCE (op
))
4266 || (TREE_CODE (op
) == SSA_NAME
4267 && SSA_NAME_VAR (op
)
4268 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4269 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4270 op
= TREE_TYPE (op
);
4272 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4274 error ("invalid conversion in return statement");
4275 debug_generic_stmt (restype
);
4276 debug_generic_stmt (TREE_TYPE (op
));
4284 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4285 is a problem, otherwise false. */
4288 verify_gimple_goto (gimple stmt
)
4290 tree dest
= gimple_goto_dest (stmt
);
4292 /* ??? We have two canonical forms of direct goto destinations, a
4293 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4294 if (TREE_CODE (dest
) != LABEL_DECL
4295 && (!is_gimple_val (dest
)
4296 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4298 error ("goto destination is neither a label nor a pointer");
4305 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4306 is a problem, otherwise false. */
4309 verify_gimple_switch (gimple stmt
)
4312 tree elt
, prev_upper_bound
= NULL_TREE
;
4313 tree index_type
, elt_type
= NULL_TREE
;
4315 if (!is_gimple_val (gimple_switch_index (stmt
)))
4317 error ("invalid operand to switch statement");
4318 debug_generic_stmt (gimple_switch_index (stmt
));
4322 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4323 if (! INTEGRAL_TYPE_P (index_type
))
4325 error ("non-integral type switch statement");
4326 debug_generic_expr (index_type
);
4330 elt
= gimple_switch_label (stmt
, 0);
4331 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4333 error ("invalid default case label in switch statement");
4334 debug_generic_expr (elt
);
4338 n
= gimple_switch_num_labels (stmt
);
4339 for (i
= 1; i
< n
; i
++)
4341 elt
= gimple_switch_label (stmt
, i
);
4343 if (! CASE_LOW (elt
))
4345 error ("invalid case label in switch statement");
4346 debug_generic_expr (elt
);
4350 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4352 error ("invalid case range in switch statement");
4353 debug_generic_expr (elt
);
4359 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4360 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4362 error ("type mismatch for case label in switch statement");
4363 debug_generic_expr (elt
);
4369 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4370 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4372 error ("type precision mismatch in switch statement");
4377 if (prev_upper_bound
)
4379 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4381 error ("case labels not sorted in switch statement");
4386 prev_upper_bound
= CASE_HIGH (elt
);
4387 if (! prev_upper_bound
)
4388 prev_upper_bound
= CASE_LOW (elt
);
4394 /* Verify a gimple debug statement STMT.
4395 Returns true if anything is wrong. */
4398 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED
)
4400 /* There isn't much that could be wrong in a gimple debug stmt. A
4401 gimple debug bind stmt, for example, maps a tree, that's usually
4402 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4403 component or member of an aggregate type, to another tree, that
4404 can be an arbitrary expression. These stmts expand into debug
4405 insns, and are converted to debug notes by var-tracking.c. */
4409 /* Verify a gimple label statement STMT.
4410 Returns true if anything is wrong. */
4413 verify_gimple_label (gimple stmt
)
4415 tree decl
= gimple_label_label (stmt
);
4419 if (TREE_CODE (decl
) != LABEL_DECL
)
4421 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4422 && DECL_CONTEXT (decl
) != current_function_decl
)
4424 error ("label's context is not the current function decl");
4428 uid
= LABEL_DECL_UID (decl
);
4431 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4433 error ("incorrect entry in label_to_block_map");
4437 uid
= EH_LANDING_PAD_NR (decl
);
4440 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4441 if (decl
!= lp
->post_landing_pad
)
4443 error ("incorrect setting of landing pad number");
4451 /* Verify the GIMPLE statement STMT. Returns true if there is an
4452 error, otherwise false. */
4455 verify_gimple_stmt (gimple stmt
)
4457 switch (gimple_code (stmt
))
4460 return verify_gimple_assign (stmt
);
4463 return verify_gimple_label (stmt
);
4466 return verify_gimple_call (stmt
);
4469 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4471 error ("invalid comparison code in gimple cond");
4474 if (!(!gimple_cond_true_label (stmt
)
4475 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4476 || !(!gimple_cond_false_label (stmt
)
4477 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4479 error ("invalid labels in gimple cond");
4483 return verify_gimple_comparison (boolean_type_node
,
4484 gimple_cond_lhs (stmt
),
4485 gimple_cond_rhs (stmt
));
4488 return verify_gimple_goto (stmt
);
4491 return verify_gimple_switch (stmt
);
4494 return verify_gimple_return (stmt
);
4499 case GIMPLE_TRANSACTION
:
4500 return verify_gimple_transaction (stmt
);
4502 /* Tuples that do not have tree operands. */
4504 case GIMPLE_PREDICT
:
4506 case GIMPLE_EH_DISPATCH
:
4507 case GIMPLE_EH_MUST_NOT_THROW
:
4511 /* OpenMP directives are validated by the FE and never operated
4512 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4513 non-gimple expressions when the main index variable has had
4514 its address taken. This does not affect the loop itself
4515 because the header of an GIMPLE_OMP_FOR is merely used to determine
4516 how to setup the parallel iteration. */
4520 return verify_gimple_debug (stmt
);
4527 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4528 and false otherwise. */
4531 verify_gimple_phi (gimple phi
)
4535 tree phi_result
= gimple_phi_result (phi
);
4540 error ("invalid PHI result");
4544 virtual_p
= virtual_operand_p (phi_result
);
4545 if (TREE_CODE (phi_result
) != SSA_NAME
4547 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4549 error ("invalid PHI result");
4553 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4555 tree t
= gimple_phi_arg_def (phi
, i
);
4559 error ("missing PHI def");
4563 /* Addressable variables do have SSA_NAMEs but they
4564 are not considered gimple values. */
4565 else if ((TREE_CODE (t
) == SSA_NAME
4566 && virtual_p
!= virtual_operand_p (t
))
4568 && (TREE_CODE (t
) != SSA_NAME
4569 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4571 && !is_gimple_val (t
)))
4573 error ("invalid PHI argument");
4574 debug_generic_expr (t
);
4577 #ifdef ENABLE_TYPES_CHECKING
4578 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4580 error ("incompatible types in PHI argument %u", i
);
4581 debug_generic_stmt (TREE_TYPE (phi_result
));
4582 debug_generic_stmt (TREE_TYPE (t
));
4591 /* Verify the GIMPLE statements inside the sequence STMTS. */
4594 verify_gimple_in_seq_2 (gimple_seq stmts
)
4596 gimple_stmt_iterator ittr
;
4599 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4601 gimple stmt
= gsi_stmt (ittr
);
4603 switch (gimple_code (stmt
))
4606 err
|= verify_gimple_in_seq_2 (gimple_bind_body (stmt
));
4610 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4611 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4614 case GIMPLE_EH_FILTER
:
4615 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4618 case GIMPLE_EH_ELSE
:
4619 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt
));
4620 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt
));
4624 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (stmt
));
4627 case GIMPLE_TRANSACTION
:
4628 err
|= verify_gimple_transaction (stmt
);
4633 bool err2
= verify_gimple_stmt (stmt
);
4635 debug_gimple_stmt (stmt
);
4644 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4645 is a problem, otherwise false. */
4648 verify_gimple_transaction (gimple stmt
)
4650 tree lab
= gimple_transaction_label (stmt
);
4651 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4653 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
4657 /* Verify the GIMPLE statements inside the statement list STMTS. */
4660 verify_gimple_in_seq (gimple_seq stmts
)
4662 timevar_push (TV_TREE_STMT_VERIFY
);
4663 if (verify_gimple_in_seq_2 (stmts
))
4664 internal_error ("verify_gimple failed");
4665 timevar_pop (TV_TREE_STMT_VERIFY
);
4668 /* Return true when the T can be shared. */
4671 tree_node_can_be_shared (tree t
)
4673 if (IS_TYPE_OR_DECL_P (t
)
4674 || is_gimple_min_invariant (t
)
4675 || TREE_CODE (t
) == SSA_NAME
4676 || t
== error_mark_node
4677 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4680 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4689 /* Called via walk_tree. Verify tree sharing. */
4692 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4694 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4696 if (tree_node_can_be_shared (*tp
))
4698 *walk_subtrees
= false;
4702 if (pointer_set_insert (visited
, *tp
))
4708 /* Called via walk_gimple_stmt. Verify tree sharing. */
4711 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
4713 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4714 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
4717 static bool eh_error_found
;
4719 verify_eh_throw_stmt_node (void **slot
, void *data
)
4721 struct throw_stmt_node
*node
= (struct throw_stmt_node
*)*slot
;
4722 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4724 if (!pointer_set_contains (visited
, node
->stmt
))
4726 error ("dead STMT in EH table");
4727 debug_gimple_stmt (node
->stmt
);
4728 eh_error_found
= true;
4733 /* Verify if the location LOCs block is in BLOCKS. */
4736 verify_location (pointer_set_t
*blocks
, location_t loc
)
4738 tree block
= LOCATION_BLOCK (loc
);
4739 if (block
!= NULL_TREE
4740 && !pointer_set_contains (blocks
, block
))
4742 error ("location references block not in block tree");
4745 if (block
!= NULL_TREE
)
4746 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
4750 /* Called via walk_tree. Verify that expressions have no blocks. */
4753 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
4757 *walk_subtrees
= false;
4761 location_t loc
= EXPR_LOCATION (*tp
);
4762 if (LOCATION_BLOCK (loc
) != NULL
)
4768 /* Called via walk_tree. Verify locations of expressions. */
4771 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4773 struct pointer_set_t
*blocks
= (struct pointer_set_t
*) data
;
4775 if (TREE_CODE (*tp
) == VAR_DECL
4776 && DECL_HAS_DEBUG_EXPR_P (*tp
))
4778 tree t
= DECL_DEBUG_EXPR (*tp
);
4779 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4783 if ((TREE_CODE (*tp
) == VAR_DECL
4784 || TREE_CODE (*tp
) == PARM_DECL
4785 || TREE_CODE (*tp
) == RESULT_DECL
)
4786 && DECL_HAS_VALUE_EXPR_P (*tp
))
4788 tree t
= DECL_VALUE_EXPR (*tp
);
4789 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4796 *walk_subtrees
= false;
4800 location_t loc
= EXPR_LOCATION (*tp
);
4801 if (verify_location (blocks
, loc
))
4807 /* Called via walk_gimple_op. Verify locations of expressions. */
4810 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
4812 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4813 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
4816 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4819 collect_subblocks (pointer_set_t
*blocks
, tree block
)
4822 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
4824 pointer_set_insert (blocks
, t
);
4825 collect_subblocks (blocks
, t
);
4829 /* Verify the GIMPLE statements in the CFG of FN. */
4832 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
)
4836 struct pointer_set_t
*visited
, *visited_stmts
, *blocks
;
4838 timevar_push (TV_TREE_STMT_VERIFY
);
4839 visited
= pointer_set_create ();
4840 visited_stmts
= pointer_set_create ();
4842 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4843 blocks
= pointer_set_create ();
4844 if (DECL_INITIAL (fn
->decl
))
4846 pointer_set_insert (blocks
, DECL_INITIAL (fn
->decl
));
4847 collect_subblocks (blocks
, DECL_INITIAL (fn
->decl
));
4850 FOR_EACH_BB_FN (bb
, fn
)
4852 gimple_stmt_iterator gsi
;
4854 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4856 gimple phi
= gsi_stmt (gsi
);
4860 pointer_set_insert (visited_stmts
, phi
);
4862 if (gimple_bb (phi
) != bb
)
4864 error ("gimple_bb (phi) is set to a wrong basic block");
4868 err2
|= verify_gimple_phi (phi
);
4870 /* Only PHI arguments have locations. */
4871 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
4873 error ("PHI node with location");
4877 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4879 tree arg
= gimple_phi_arg_def (phi
, i
);
4880 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
4884 error ("incorrect sharing of tree nodes");
4885 debug_generic_expr (addr
);
4888 location_t loc
= gimple_phi_arg_location (phi
, i
);
4889 if (virtual_operand_p (gimple_phi_result (phi
))
4890 && loc
!= UNKNOWN_LOCATION
)
4892 error ("virtual PHI with argument locations");
4895 addr
= walk_tree (&arg
, verify_expr_location_1
, blocks
, NULL
);
4898 debug_generic_expr (addr
);
4901 err2
|= verify_location (blocks
, loc
);
4905 debug_gimple_stmt (phi
);
4909 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4911 gimple stmt
= gsi_stmt (gsi
);
4913 struct walk_stmt_info wi
;
4917 pointer_set_insert (visited_stmts
, stmt
);
4919 if (gimple_bb (stmt
) != bb
)
4921 error ("gimple_bb (stmt) is set to a wrong basic block");
4925 err2
|= verify_gimple_stmt (stmt
);
4926 err2
|= verify_location (blocks
, gimple_location (stmt
));
4928 memset (&wi
, 0, sizeof (wi
));
4929 wi
.info
= (void *) visited
;
4930 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
4933 error ("incorrect sharing of tree nodes");
4934 debug_generic_expr (addr
);
4938 memset (&wi
, 0, sizeof (wi
));
4939 wi
.info
= (void *) blocks
;
4940 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
4943 debug_generic_expr (addr
);
4947 /* ??? Instead of not checking these stmts at all the walker
4948 should know its context via wi. */
4949 if (!is_gimple_debug (stmt
)
4950 && !is_gimple_omp (stmt
))
4952 memset (&wi
, 0, sizeof (wi
));
4953 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
4956 debug_generic_expr (addr
);
4957 inform (gimple_location (stmt
), "in statement");
4962 /* If the statement is marked as part of an EH region, then it is
4963 expected that the statement could throw. Verify that when we
4964 have optimizations that simplify statements such that we prove
4965 that they cannot throw, that we update other data structures
4967 lp_nr
= lookup_stmt_eh_lp (stmt
);
4970 if (!stmt_could_throw_p (stmt
))
4974 error ("statement marked for throw, but doesn%'t");
4978 else if (!gsi_one_before_end_p (gsi
))
4980 error ("statement marked for throw in middle of block");
4986 debug_gimple_stmt (stmt
);
4991 eh_error_found
= false;
4992 if (get_eh_throw_stmt_table (cfun
))
4993 htab_traverse (get_eh_throw_stmt_table (cfun
),
4994 verify_eh_throw_stmt_node
,
4997 if (err
|| eh_error_found
)
4998 internal_error ("verify_gimple failed");
5000 pointer_set_destroy (visited
);
5001 pointer_set_destroy (visited_stmts
);
5002 pointer_set_destroy (blocks
);
5003 verify_histograms ();
5004 timevar_pop (TV_TREE_STMT_VERIFY
);
5008 /* Verifies that the flow information is OK. */
5011 gimple_verify_flow_info (void)
5015 gimple_stmt_iterator gsi
;
5020 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5021 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5023 error ("ENTRY_BLOCK has IL associated with it");
5027 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5028 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5030 error ("EXIT_BLOCK has IL associated with it");
5034 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5035 if (e
->flags
& EDGE_FALLTHRU
)
5037 error ("fallthru to exit from bb %d", e
->src
->index
);
5041 FOR_EACH_BB_FN (bb
, cfun
)
5043 bool found_ctrl_stmt
= false;
5047 /* Skip labels on the start of basic block. */
5048 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5051 gimple prev_stmt
= stmt
;
5053 stmt
= gsi_stmt (gsi
);
5055 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5058 label
= gimple_label_label (stmt
);
5059 if (prev_stmt
&& DECL_NONLOCAL (label
))
5061 error ("nonlocal label ");
5062 print_generic_expr (stderr
, label
, 0);
5063 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5068 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5070 error ("EH landing pad label ");
5071 print_generic_expr (stderr
, label
, 0);
5072 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5077 if (label_to_block (label
) != bb
)
5080 print_generic_expr (stderr
, label
, 0);
5081 fprintf (stderr
, " to block does not match in bb %d",
5086 if (decl_function_context (label
) != current_function_decl
)
5089 print_generic_expr (stderr
, label
, 0);
5090 fprintf (stderr
, " has incorrect context in bb %d",
5096 /* Verify that body of basic block BB is free of control flow. */
5097 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5099 gimple stmt
= gsi_stmt (gsi
);
5101 if (found_ctrl_stmt
)
5103 error ("control flow in the middle of basic block %d",
5108 if (stmt_ends_bb_p (stmt
))
5109 found_ctrl_stmt
= true;
5111 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5114 print_generic_expr (stderr
, gimple_label_label (stmt
), 0);
5115 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
5120 gsi
= gsi_last_bb (bb
);
5121 if (gsi_end_p (gsi
))
5124 stmt
= gsi_stmt (gsi
);
5126 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5129 err
|= verify_eh_edges (stmt
);
5131 if (is_ctrl_stmt (stmt
))
5133 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5134 if (e
->flags
& EDGE_FALLTHRU
)
5136 error ("fallthru edge after a control statement in bb %d",
5142 if (gimple_code (stmt
) != GIMPLE_COND
)
5144 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5145 after anything else but if statement. */
5146 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5147 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5149 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5155 switch (gimple_code (stmt
))
5162 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5166 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5167 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5168 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5169 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5170 || EDGE_COUNT (bb
->succs
) >= 3)
5172 error ("wrong outgoing edge flags at end of bb %d",
5180 if (simple_goto_p (stmt
))
5182 error ("explicit goto at end of bb %d", bb
->index
);
5187 /* FIXME. We should double check that the labels in the
5188 destination blocks have their address taken. */
5189 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5190 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5191 | EDGE_FALSE_VALUE
))
5192 || !(e
->flags
& EDGE_ABNORMAL
))
5194 error ("wrong outgoing edge flags at end of bb %d",
5202 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5204 /* ... fallthru ... */
5206 if (!single_succ_p (bb
)
5207 || (single_succ_edge (bb
)->flags
5208 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5209 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5211 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5214 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5216 error ("return edge does not point to exit in bb %d",
5228 n
= gimple_switch_num_labels (stmt
);
5230 /* Mark all the destination basic blocks. */
5231 for (i
= 0; i
< n
; ++i
)
5233 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5234 basic_block label_bb
= label_to_block (lab
);
5235 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5236 label_bb
->aux
= (void *)1;
5239 /* Verify that the case labels are sorted. */
5240 prev
= gimple_switch_label (stmt
, 0);
5241 for (i
= 1; i
< n
; ++i
)
5243 tree c
= gimple_switch_label (stmt
, i
);
5246 error ("found default case not at the start of "
5252 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5254 error ("case labels not sorted: ");
5255 print_generic_expr (stderr
, prev
, 0);
5256 fprintf (stderr
," is greater than ");
5257 print_generic_expr (stderr
, c
, 0);
5258 fprintf (stderr
," but comes before it.\n");
5263 /* VRP will remove the default case if it can prove it will
5264 never be executed. So do not verify there always exists
5265 a default case here. */
5267 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5271 error ("extra outgoing edge %d->%d",
5272 bb
->index
, e
->dest
->index
);
5276 e
->dest
->aux
= (void *)2;
5277 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5278 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5280 error ("wrong outgoing edge flags at end of bb %d",
5286 /* Check that we have all of them. */
5287 for (i
= 0; i
< n
; ++i
)
5289 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5290 basic_block label_bb
= label_to_block (lab
);
5292 if (label_bb
->aux
!= (void *)2)
5294 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5299 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5300 e
->dest
->aux
= (void *)0;
5304 case GIMPLE_EH_DISPATCH
:
5305 err
|= verify_eh_dispatch_edge (stmt
);
5313 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5314 verify_dominators (CDI_DOMINATORS
);
5320 /* Updates phi nodes after creating a forwarder block joined
5321 by edge FALLTHRU. */
5324 gimple_make_forwarder_block (edge fallthru
)
5328 basic_block dummy
, bb
;
5330 gimple_stmt_iterator gsi
;
5332 dummy
= fallthru
->src
;
5333 bb
= fallthru
->dest
;
5335 if (single_pred_p (bb
))
5338 /* If we redirected a branch we must create new PHI nodes at the
5340 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5342 gimple phi
, new_phi
;
5344 phi
= gsi_stmt (gsi
);
5345 var
= gimple_phi_result (phi
);
5346 new_phi
= create_phi_node (var
, bb
);
5347 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5348 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5352 /* Add the arguments we have stored on edges. */
5353 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5358 flush_pending_stmts (e
);
5363 /* Return a non-special label in the head of basic block BLOCK.
5364 Create one if it doesn't exist. */
5367 gimple_block_label (basic_block bb
)
5369 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5374 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5376 stmt
= gsi_stmt (i
);
5377 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5379 label
= gimple_label_label (stmt
);
5380 if (!DECL_NONLOCAL (label
))
5383 gsi_move_before (&i
, &s
);
5388 label
= create_artificial_label (UNKNOWN_LOCATION
);
5389 stmt
= gimple_build_label (label
);
5390 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5395 /* Attempt to perform edge redirection by replacing a possibly complex
5396 jump instruction by a goto or by removing the jump completely.
5397 This can apply only if all edges now point to the same block. The
5398 parameters and return values are equivalent to
5399 redirect_edge_and_branch. */
5402 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5404 basic_block src
= e
->src
;
5405 gimple_stmt_iterator i
;
5408 /* We can replace or remove a complex jump only when we have exactly
5410 if (EDGE_COUNT (src
->succs
) != 2
5411 /* Verify that all targets will be TARGET. Specifically, the
5412 edge that is not E must also go to TARGET. */
5413 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5416 i
= gsi_last_bb (src
);
5420 stmt
= gsi_stmt (i
);
5422 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5424 gsi_remove (&i
, true);
5425 e
= ssa_redirect_edge (e
, target
);
5426 e
->flags
= EDGE_FALLTHRU
;
5434 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5435 edge representing the redirected branch. */
5438 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5440 basic_block bb
= e
->src
;
5441 gimple_stmt_iterator gsi
;
5445 if (e
->flags
& EDGE_ABNORMAL
)
5448 if (e
->dest
== dest
)
5451 if (e
->flags
& EDGE_EH
)
5452 return redirect_eh_edge (e
, dest
);
5454 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5456 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5461 gsi
= gsi_last_bb (bb
);
5462 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5464 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5467 /* For COND_EXPR, we only need to redirect the edge. */
5471 /* No non-abnormal edges should lead from a non-simple goto, and
5472 simple ones should be represented implicitly. */
5477 tree label
= gimple_block_label (dest
);
5478 tree cases
= get_cases_for_edge (e
, stmt
);
5480 /* If we have a list of cases associated with E, then use it
5481 as it's a lot faster than walking the entire case vector. */
5484 edge e2
= find_edge (e
->src
, dest
);
5491 CASE_LABEL (cases
) = label
;
5492 cases
= CASE_CHAIN (cases
);
5495 /* If there was already an edge in the CFG, then we need
5496 to move all the cases associated with E to E2. */
5499 tree cases2
= get_cases_for_edge (e2
, stmt
);
5501 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5502 CASE_CHAIN (cases2
) = first
;
5504 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5508 size_t i
, n
= gimple_switch_num_labels (stmt
);
5510 for (i
= 0; i
< n
; i
++)
5512 tree elt
= gimple_switch_label (stmt
, i
);
5513 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5514 CASE_LABEL (elt
) = label
;
5522 int i
, n
= gimple_asm_nlabels (stmt
);
5525 for (i
= 0; i
< n
; ++i
)
5527 tree cons
= gimple_asm_label_op (stmt
, i
);
5528 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5531 label
= gimple_block_label (dest
);
5532 TREE_VALUE (cons
) = label
;
5536 /* If we didn't find any label matching the former edge in the
5537 asm labels, we must be redirecting the fallthrough
5539 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5544 gsi_remove (&gsi
, true);
5545 e
->flags
|= EDGE_FALLTHRU
;
5548 case GIMPLE_OMP_RETURN
:
5549 case GIMPLE_OMP_CONTINUE
:
5550 case GIMPLE_OMP_SECTIONS_SWITCH
:
5551 case GIMPLE_OMP_FOR
:
5552 /* The edges from OMP constructs can be simply redirected. */
5555 case GIMPLE_EH_DISPATCH
:
5556 if (!(e
->flags
& EDGE_FALLTHRU
))
5557 redirect_eh_dispatch_edge (stmt
, e
, dest
);
5560 case GIMPLE_TRANSACTION
:
5561 /* The ABORT edge has a stored label associated with it, otherwise
5562 the edges are simply redirectable. */
5564 gimple_transaction_set_label (stmt
, gimple_block_label (dest
));
5568 /* Otherwise it must be a fallthru edge, and we don't need to
5569 do anything besides redirecting it. */
5570 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5574 /* Update/insert PHI nodes as necessary. */
5576 /* Now update the edges in the CFG. */
5577 e
= ssa_redirect_edge (e
, dest
);
5582 /* Returns true if it is possible to remove edge E by redirecting
5583 it to the destination of the other edge from E->src. */
5586 gimple_can_remove_branch_p (const_edge e
)
5588 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
5594 /* Simple wrapper, as we can always redirect fallthru edges. */
5597 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5599 e
= gimple_redirect_edge_and_branch (e
, dest
);
5606 /* Splits basic block BB after statement STMT (but at least after the
5607 labels). If STMT is NULL, BB is split just after the labels. */
5610 gimple_split_block (basic_block bb
, void *stmt
)
5612 gimple_stmt_iterator gsi
;
5613 gimple_stmt_iterator gsi_tgt
;
5620 new_bb
= create_empty_bb (bb
);
5622 /* Redirect the outgoing edges. */
5623 new_bb
->succs
= bb
->succs
;
5625 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5628 if (stmt
&& gimple_code ((gimple
) stmt
) == GIMPLE_LABEL
)
5631 /* Move everything from GSI to the new basic block. */
5632 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5634 act
= gsi_stmt (gsi
);
5635 if (gimple_code (act
) == GIMPLE_LABEL
)
5648 if (gsi_end_p (gsi
))
5651 /* Split the statement list - avoid re-creating new containers as this
5652 brings ugly quadratic memory consumption in the inliner.
5653 (We are still quadratic since we need to update stmt BB pointers,
5655 gsi_split_seq_before (&gsi
, &list
);
5656 set_bb_seq (new_bb
, list
);
5657 for (gsi_tgt
= gsi_start (list
);
5658 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5659 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
5665 /* Moves basic block BB after block AFTER. */
5668 gimple_move_block_after (basic_block bb
, basic_block after
)
5670 if (bb
->prev_bb
== after
)
5674 link_block (bb
, after
);
5680 /* Return TRUE if block BB has no executable statements, otherwise return
5684 gimple_empty_block_p (basic_block bb
)
5686 /* BB must have no executable statements. */
5687 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5690 if (gsi_end_p (gsi
))
5692 if (is_gimple_debug (gsi_stmt (gsi
)))
5693 gsi_next_nondebug (&gsi
);
5694 return gsi_end_p (gsi
);
5698 /* Split a basic block if it ends with a conditional branch and if the
5699 other part of the block is not empty. */
5702 gimple_split_block_before_cond_jump (basic_block bb
)
5704 gimple last
, split_point
;
5705 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5706 if (gsi_end_p (gsi
))
5708 last
= gsi_stmt (gsi
);
5709 if (gimple_code (last
) != GIMPLE_COND
5710 && gimple_code (last
) != GIMPLE_SWITCH
)
5712 gsi_prev_nondebug (&gsi
);
5713 split_point
= gsi_stmt (gsi
);
5714 return split_block (bb
, split_point
)->dest
;
5718 /* Return true if basic_block can be duplicated. */
5721 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
5726 /* Create a duplicate of the basic block BB. NOTE: This does not
5727 preserve SSA form. */
5730 gimple_duplicate_bb (basic_block bb
)
5733 gimple_stmt_iterator gsi
, gsi_tgt
;
5734 gimple_seq phis
= phi_nodes (bb
);
5735 gimple phi
, stmt
, copy
;
5737 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
5739 /* Copy the PHI nodes. We ignore PHI node arguments here because
5740 the incoming edges have not been setup yet. */
5741 for (gsi
= gsi_start (phis
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5743 phi
= gsi_stmt (gsi
);
5744 copy
= create_phi_node (NULL_TREE
, new_bb
);
5745 create_new_def_for (gimple_phi_result (phi
), copy
,
5746 gimple_phi_result_ptr (copy
));
5747 gimple_set_uid (copy
, gimple_uid (phi
));
5750 gsi_tgt
= gsi_start_bb (new_bb
);
5751 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5753 def_operand_p def_p
;
5754 ssa_op_iter op_iter
;
5757 stmt
= gsi_stmt (gsi
);
5758 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5761 /* Don't duplicate label debug stmts. */
5762 if (gimple_debug_bind_p (stmt
)
5763 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
5767 /* Create a new copy of STMT and duplicate STMT's virtual
5769 copy
= gimple_copy (stmt
);
5770 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
5772 maybe_duplicate_eh_stmt (copy
, stmt
);
5773 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5775 /* When copying around a stmt writing into a local non-user
5776 aggregate, make sure it won't share stack slot with other
5778 lhs
= gimple_get_lhs (stmt
);
5779 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
5781 tree base
= get_base_address (lhs
);
5783 && (TREE_CODE (base
) == VAR_DECL
5784 || TREE_CODE (base
) == RESULT_DECL
)
5785 && DECL_IGNORED_P (base
)
5786 && !TREE_STATIC (base
)
5787 && !DECL_EXTERNAL (base
)
5788 && (TREE_CODE (base
) != VAR_DECL
5789 || !DECL_HAS_VALUE_EXPR_P (base
)))
5790 DECL_NONSHAREABLE (base
) = 1;
5793 /* Create new names for all the definitions created by COPY and
5794 add replacement mappings for each new name. */
5795 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
5796 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
5802 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5805 add_phi_args_after_copy_edge (edge e_copy
)
5807 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
5810 gimple phi
, phi_copy
;
5812 gimple_stmt_iterator psi
, psi_copy
;
5814 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
5817 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
5819 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
5820 dest
= get_bb_original (e_copy
->dest
);
5822 dest
= e_copy
->dest
;
5824 e
= find_edge (bb
, dest
);
5827 /* During loop unrolling the target of the latch edge is copied.
5828 In this case we are not looking for edge to dest, but to
5829 duplicated block whose original was dest. */
5830 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5832 if ((e
->dest
->flags
& BB_DUPLICATED
)
5833 && get_bb_original (e
->dest
) == dest
)
5837 gcc_assert (e
!= NULL
);
5840 for (psi
= gsi_start_phis (e
->dest
),
5841 psi_copy
= gsi_start_phis (e_copy
->dest
);
5843 gsi_next (&psi
), gsi_next (&psi_copy
))
5845 phi
= gsi_stmt (psi
);
5846 phi_copy
= gsi_stmt (psi_copy
);
5847 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5848 add_phi_arg (phi_copy
, def
, e_copy
,
5849 gimple_phi_arg_location_from_edge (phi
, e
));
5854 /* Basic block BB_COPY was created by code duplication. Add phi node
5855 arguments for edges going out of BB_COPY. The blocks that were
5856 duplicated have BB_DUPLICATED set. */
5859 add_phi_args_after_copy_bb (basic_block bb_copy
)
5864 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
5866 add_phi_args_after_copy_edge (e_copy
);
5870 /* Blocks in REGION_COPY array of length N_REGION were created by
5871 duplication of basic blocks. Add phi node arguments for edges
5872 going from these blocks. If E_COPY is not NULL, also add
5873 phi node arguments for its destination.*/
5876 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
5881 for (i
= 0; i
< n_region
; i
++)
5882 region_copy
[i
]->flags
|= BB_DUPLICATED
;
5884 for (i
= 0; i
< n_region
; i
++)
5885 add_phi_args_after_copy_bb (region_copy
[i
]);
5887 add_phi_args_after_copy_edge (e_copy
);
5889 for (i
= 0; i
< n_region
; i
++)
5890 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
5893 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5894 important exit edge EXIT. By important we mean that no SSA name defined
5895 inside region is live over the other exit edges of the region. All entry
5896 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5897 to the duplicate of the region. Dominance and loop information is
5898 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5899 UPDATE_DOMINANCE is false then we assume that the caller will update the
5900 dominance information after calling this function. The new basic
5901 blocks are stored to REGION_COPY in the same order as they had in REGION,
5902 provided that REGION_COPY is not NULL.
5903 The function returns false if it is unable to copy the region,
5907 gimple_duplicate_sese_region (edge entry
, edge exit
,
5908 basic_block
*region
, unsigned n_region
,
5909 basic_block
*region_copy
,
5910 bool update_dominance
)
5913 bool free_region_copy
= false, copying_header
= false;
5914 struct loop
*loop
= entry
->dest
->loop_father
;
5916 vec
<basic_block
> doms
;
5918 int total_freq
= 0, entry_freq
= 0;
5919 gcov_type total_count
= 0, entry_count
= 0;
5921 if (!can_copy_bbs_p (region
, n_region
))
5924 /* Some sanity checking. Note that we do not check for all possible
5925 missuses of the functions. I.e. if you ask to copy something weird,
5926 it will work, but the state of structures probably will not be
5928 for (i
= 0; i
< n_region
; i
++)
5930 /* We do not handle subloops, i.e. all the blocks must belong to the
5932 if (region
[i
]->loop_father
!= loop
)
5935 if (region
[i
] != entry
->dest
5936 && region
[i
] == loop
->header
)
5940 /* In case the function is used for loop header copying (which is the primary
5941 use), ensure that EXIT and its copy will be new latch and entry edges. */
5942 if (loop
->header
== entry
->dest
)
5944 copying_header
= true;
5946 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
5949 for (i
= 0; i
< n_region
; i
++)
5950 if (region
[i
] != exit
->src
5951 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
5955 initialize_original_copy_tables ();
5958 set_loop_copy (loop
, loop_outer (loop
));
5960 set_loop_copy (loop
, loop
);
5964 region_copy
= XNEWVEC (basic_block
, n_region
);
5965 free_region_copy
= true;
5968 /* Record blocks outside the region that are dominated by something
5970 if (update_dominance
)
5973 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
5976 if (entry
->dest
->count
)
5978 total_count
= entry
->dest
->count
;
5979 entry_count
= entry
->count
;
5980 /* Fix up corner cases, to avoid division by zero or creation of negative
5982 if (entry_count
> total_count
)
5983 entry_count
= total_count
;
5987 total_freq
= entry
->dest
->frequency
;
5988 entry_freq
= EDGE_FREQUENCY (entry
);
5989 /* Fix up corner cases, to avoid division by zero or creation of negative
5991 if (total_freq
== 0)
5993 else if (entry_freq
> total_freq
)
5994 entry_freq
= total_freq
;
5997 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
5998 split_edge_bb_loc (entry
), update_dominance
);
6001 scale_bbs_frequencies_gcov_type (region
, n_region
,
6002 total_count
- entry_count
,
6004 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
6009 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
6011 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
6016 loop
->header
= exit
->dest
;
6017 loop
->latch
= exit
->src
;
6020 /* Redirect the entry and add the phi node arguments. */
6021 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
6022 gcc_assert (redirected
!= NULL
);
6023 flush_pending_stmts (entry
);
6025 /* Concerning updating of dominators: We must recount dominators
6026 for entry block and its copy. Anything that is outside of the
6027 region, but was dominated by something inside needs recounting as
6029 if (update_dominance
)
6031 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6032 doms
.safe_push (get_bb_original (entry
->dest
));
6033 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6037 /* Add the other PHI node arguments. */
6038 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6040 if (free_region_copy
)
6043 free_original_copy_tables ();
6047 /* Checks if BB is part of the region defined by N_REGION BBS. */
6049 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6053 for (n
= 0; n
< n_region
; n
++)
6061 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6062 are stored to REGION_COPY in the same order in that they appear
6063 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6064 the region, EXIT an exit from it. The condition guarding EXIT
6065 is moved to ENTRY. Returns true if duplication succeeds, false
6091 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
6092 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
6093 basic_block
*region_copy ATTRIBUTE_UNUSED
)
6096 bool free_region_copy
= false;
6097 struct loop
*loop
= exit
->dest
->loop_father
;
6098 struct loop
*orig_loop
= entry
->dest
->loop_father
;
6099 basic_block switch_bb
, entry_bb
, nentry_bb
;
6100 vec
<basic_block
> doms
;
6101 int total_freq
= 0, exit_freq
= 0;
6102 gcov_type total_count
= 0, exit_count
= 0;
6103 edge exits
[2], nexits
[2], e
;
6104 gimple_stmt_iterator gsi
;
6107 basic_block exit_bb
;
6108 gimple_stmt_iterator psi
;
6111 struct loop
*target
, *aloop
, *cloop
;
6113 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6115 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6117 if (!can_copy_bbs_p (region
, n_region
))
6120 initialize_original_copy_tables ();
6121 set_loop_copy (orig_loop
, loop
);
6124 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6126 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6128 cloop
= duplicate_loop (aloop
, target
);
6129 duplicate_subloops (aloop
, cloop
);
6135 region_copy
= XNEWVEC (basic_block
, n_region
);
6136 free_region_copy
= true;
6139 gcc_assert (!need_ssa_update_p (cfun
));
6141 /* Record blocks outside the region that are dominated by something
6143 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6145 if (exit
->src
->count
)
6147 total_count
= exit
->src
->count
;
6148 exit_count
= exit
->count
;
6149 /* Fix up corner cases, to avoid division by zero or creation of negative
6151 if (exit_count
> total_count
)
6152 exit_count
= total_count
;
6156 total_freq
= exit
->src
->frequency
;
6157 exit_freq
= EDGE_FREQUENCY (exit
);
6158 /* Fix up corner cases, to avoid division by zero or creation of negative
6160 if (total_freq
== 0)
6162 if (exit_freq
> total_freq
)
6163 exit_freq
= total_freq
;
6166 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6167 split_edge_bb_loc (exit
), true);
6170 scale_bbs_frequencies_gcov_type (region
, n_region
,
6171 total_count
- exit_count
,
6173 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
6178 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6180 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6183 /* Create the switch block, and put the exit condition to it. */
6184 entry_bb
= entry
->dest
;
6185 nentry_bb
= get_bb_copy (entry_bb
);
6186 if (!last_stmt (entry
->src
)
6187 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6188 switch_bb
= entry
->src
;
6190 switch_bb
= split_edge (entry
);
6191 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6193 gsi
= gsi_last_bb (switch_bb
);
6194 cond_stmt
= last_stmt (exit
->src
);
6195 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6196 cond_stmt
= gimple_copy (cond_stmt
);
6198 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6200 sorig
= single_succ_edge (switch_bb
);
6201 sorig
->flags
= exits
[1]->flags
;
6202 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6204 /* Register the new edge from SWITCH_BB in loop exit lists. */
6205 rescan_loop_exit (snew
, true, false);
6207 /* Add the PHI node arguments. */
6208 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6210 /* Get rid of now superfluous conditions and associated edges (and phi node
6212 exit_bb
= exit
->dest
;
6214 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6215 PENDING_STMT (e
) = NULL
;
6217 /* The latch of ORIG_LOOP was copied, and so was the backedge
6218 to the original header. We redirect this backedge to EXIT_BB. */
6219 for (i
= 0; i
< n_region
; i
++)
6220 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6222 gcc_assert (single_succ_edge (region_copy
[i
]));
6223 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6224 PENDING_STMT (e
) = NULL
;
6225 for (psi
= gsi_start_phis (exit_bb
);
6229 phi
= gsi_stmt (psi
);
6230 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6231 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6234 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6235 PENDING_STMT (e
) = NULL
;
6237 /* Anything that is outside of the region, but was dominated by something
6238 inside needs to update dominance info. */
6239 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6241 /* Update the SSA web. */
6242 update_ssa (TODO_update_ssa
);
6244 if (free_region_copy
)
6247 free_original_copy_tables ();
6251 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6252 adding blocks when the dominator traversal reaches EXIT. This
6253 function silently assumes that ENTRY strictly dominates EXIT. */
6256 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6257 vec
<basic_block
> *bbs_p
)
6261 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6263 son
= next_dom_son (CDI_DOMINATORS
, son
))
6265 bbs_p
->safe_push (son
);
6267 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6271 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6272 The duplicates are recorded in VARS_MAP. */
6275 replace_by_duplicate_decl (tree
*tp
, struct pointer_map_t
*vars_map
,
6278 tree t
= *tp
, new_t
;
6279 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6282 if (DECL_CONTEXT (t
) == to_context
)
6285 loc
= pointer_map_contains (vars_map
, t
);
6289 loc
= pointer_map_insert (vars_map
, t
);
6293 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6294 add_local_decl (f
, new_t
);
6298 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6299 new_t
= copy_node (t
);
6301 DECL_CONTEXT (new_t
) = to_context
;
6306 new_t
= (tree
) *loc
;
6312 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6313 VARS_MAP maps old ssa names and var_decls to the new ones. */
6316 replace_ssa_name (tree name
, struct pointer_map_t
*vars_map
,
6322 gcc_assert (!virtual_operand_p (name
));
6324 loc
= pointer_map_contains (vars_map
, name
);
6328 tree decl
= SSA_NAME_VAR (name
);
6331 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6332 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6333 decl
, SSA_NAME_DEF_STMT (name
));
6334 if (SSA_NAME_IS_DEFAULT_DEF (name
))
6335 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context
),
6339 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6340 name
, SSA_NAME_DEF_STMT (name
));
6342 loc
= pointer_map_insert (vars_map
, name
);
6346 new_name
= (tree
) *loc
;
6357 struct pointer_map_t
*vars_map
;
6358 htab_t new_label_map
;
6359 struct pointer_map_t
*eh_map
;
6363 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6364 contained in *TP if it has been ORIG_BLOCK previously and change the
6365 DECL_CONTEXT of every local variable referenced in *TP. */
6368 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6370 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6371 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6376 tree block
= TREE_BLOCK (t
);
6377 if (block
== p
->orig_block
6378 || (p
->orig_block
== NULL_TREE
6379 && block
!= NULL_TREE
))
6380 TREE_SET_BLOCK (t
, p
->new_block
);
6381 #ifdef ENABLE_CHECKING
6382 else if (block
!= NULL_TREE
)
6384 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6385 block
= BLOCK_SUPERCONTEXT (block
);
6386 gcc_assert (block
== p
->orig_block
);
6390 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6392 if (TREE_CODE (t
) == SSA_NAME
)
6393 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6394 else if (TREE_CODE (t
) == LABEL_DECL
)
6396 if (p
->new_label_map
)
6398 struct tree_map in
, *out
;
6400 out
= (struct tree_map
*)
6401 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6406 DECL_CONTEXT (t
) = p
->to_context
;
6408 else if (p
->remap_decls_p
)
6410 /* Replace T with its duplicate. T should no longer appear in the
6411 parent function, so this looks wasteful; however, it may appear
6412 in referenced_vars, and more importantly, as virtual operands of
6413 statements, and in alias lists of other variables. It would be
6414 quite difficult to expunge it from all those places. ??? It might
6415 suffice to do this for addressable variables. */
6416 if ((TREE_CODE (t
) == VAR_DECL
6417 && !is_global_var (t
))
6418 || TREE_CODE (t
) == CONST_DECL
)
6419 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6423 else if (TYPE_P (t
))
6429 /* Helper for move_stmt_r. Given an EH region number for the source
6430 function, map that to the duplicate EH regio number in the dest. */
6433 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6435 eh_region old_r
, new_r
;
6438 old_r
= get_eh_region_from_number (old_nr
);
6439 slot
= pointer_map_contains (p
->eh_map
, old_r
);
6440 new_r
= (eh_region
) *slot
;
6442 return new_r
->index
;
6445 /* Similar, but operate on INTEGER_CSTs. */
6448 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6452 old_nr
= tree_to_shwi (old_t_nr
);
6453 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6455 return build_int_cst (integer_type_node
, new_nr
);
6458 /* Like move_stmt_op, but for gimple statements.
6460 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6461 contained in the current statement in *GSI_P and change the
6462 DECL_CONTEXT of every local variable referenced in the current
6466 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6467 struct walk_stmt_info
*wi
)
6469 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6470 gimple stmt
= gsi_stmt (*gsi_p
);
6471 tree block
= gimple_block (stmt
);
6473 if (block
== p
->orig_block
6474 || (p
->orig_block
== NULL_TREE
6475 && block
!= NULL_TREE
))
6476 gimple_set_block (stmt
, p
->new_block
);
6478 switch (gimple_code (stmt
))
6481 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6483 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6484 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6485 switch (DECL_FUNCTION_CODE (fndecl
))
6487 case BUILT_IN_EH_COPY_VALUES
:
6488 r
= gimple_call_arg (stmt
, 1);
6489 r
= move_stmt_eh_region_tree_nr (r
, p
);
6490 gimple_call_set_arg (stmt
, 1, r
);
6493 case BUILT_IN_EH_POINTER
:
6494 case BUILT_IN_EH_FILTER
:
6495 r
= gimple_call_arg (stmt
, 0);
6496 r
= move_stmt_eh_region_tree_nr (r
, p
);
6497 gimple_call_set_arg (stmt
, 0, r
);
6508 int r
= gimple_resx_region (stmt
);
6509 r
= move_stmt_eh_region_nr (r
, p
);
6510 gimple_resx_set_region (stmt
, r
);
6514 case GIMPLE_EH_DISPATCH
:
6516 int r
= gimple_eh_dispatch_region (stmt
);
6517 r
= move_stmt_eh_region_nr (r
, p
);
6518 gimple_eh_dispatch_set_region (stmt
, r
);
6522 case GIMPLE_OMP_RETURN
:
6523 case GIMPLE_OMP_CONTINUE
:
6526 if (is_gimple_omp (stmt
))
6528 /* Do not remap variables inside OMP directives. Variables
6529 referenced in clauses and directive header belong to the
6530 parent function and should not be moved into the child
6532 bool save_remap_decls_p
= p
->remap_decls_p
;
6533 p
->remap_decls_p
= false;
6534 *handled_ops_p
= true;
6536 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6539 p
->remap_decls_p
= save_remap_decls_p
;
6547 /* Move basic block BB from function CFUN to function DEST_FN. The
6548 block is moved out of the original linked list and placed after
6549 block AFTER in the new list. Also, the block is removed from the
6550 original array of blocks and placed in DEST_FN's array of blocks.
6551 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6552 updated to reflect the moved edges.
6554 The local variables are remapped to new instances, VARS_MAP is used
6555 to record the mapping. */
6558 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6559 basic_block after
, bool update_edge_count_p
,
6560 struct move_stmt_d
*d
)
6562 struct control_flow_graph
*cfg
;
6565 gimple_stmt_iterator si
;
6566 unsigned old_len
, new_len
;
6568 /* Remove BB from dominance structures. */
6569 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6571 /* Move BB from its current loop to the copy in the new function. */
6574 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
6576 bb
->loop_father
= new_loop
;
6579 /* Link BB to the new linked list. */
6580 move_block_after (bb
, after
);
6582 /* Update the edge count in the corresponding flowgraphs. */
6583 if (update_edge_count_p
)
6584 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6586 cfun
->cfg
->x_n_edges
--;
6587 dest_cfun
->cfg
->x_n_edges
++;
6590 /* Remove BB from the original basic block array. */
6591 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
6592 cfun
->cfg
->x_n_basic_blocks
--;
6594 /* Grow DEST_CFUN's basic block array if needed. */
6595 cfg
= dest_cfun
->cfg
;
6596 cfg
->x_n_basic_blocks
++;
6597 if (bb
->index
>= cfg
->x_last_basic_block
)
6598 cfg
->x_last_basic_block
= bb
->index
+ 1;
6600 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
6601 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
6603 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
6604 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
6607 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
6609 /* Remap the variables in phi nodes. */
6610 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); )
6612 gimple phi
= gsi_stmt (si
);
6614 tree op
= PHI_RESULT (phi
);
6618 if (virtual_operand_p (op
))
6620 /* Remove the phi nodes for virtual operands (alias analysis will be
6621 run for the new function, anyway). */
6622 remove_phi_node (&si
, true);
6626 SET_PHI_RESULT (phi
,
6627 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6628 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
6630 op
= USE_FROM_PTR (use
);
6631 if (TREE_CODE (op
) == SSA_NAME
)
6632 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6635 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
6637 location_t locus
= gimple_phi_arg_location (phi
, i
);
6638 tree block
= LOCATION_BLOCK (locus
);
6640 if (locus
== UNKNOWN_LOCATION
)
6642 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
6644 if (d
->new_block
== NULL_TREE
)
6645 locus
= LOCATION_LOCUS (locus
);
6647 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, d
->new_block
);
6648 gimple_phi_arg_set_location (phi
, i
, locus
);
6655 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6657 gimple stmt
= gsi_stmt (si
);
6658 struct walk_stmt_info wi
;
6660 memset (&wi
, 0, sizeof (wi
));
6662 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
6664 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6666 tree label
= gimple_label_label (stmt
);
6667 int uid
= LABEL_DECL_UID (label
);
6669 gcc_assert (uid
> -1);
6671 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
6672 if (old_len
<= (unsigned) uid
)
6674 new_len
= 3 * uid
/ 2 + 1;
6675 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
6678 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
6679 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
6681 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
6683 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
6684 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
6687 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
6688 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
6690 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
6691 gimple_remove_stmt_histograms (cfun
, stmt
);
6693 /* We cannot leave any operands allocated from the operand caches of
6694 the current function. */
6695 free_stmt_operands (cfun
, stmt
);
6696 push_cfun (dest_cfun
);
6701 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6702 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
6704 tree block
= LOCATION_BLOCK (e
->goto_locus
);
6705 if (d
->orig_block
== NULL_TREE
6706 || block
== d
->orig_block
)
6707 e
->goto_locus
= d
->new_block
?
6708 COMBINE_LOCATION_DATA (line_table
, e
->goto_locus
, d
->new_block
) :
6709 LOCATION_LOCUS (e
->goto_locus
);
6713 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6714 the outermost EH region. Use REGION as the incoming base EH region. */
6717 find_outermost_region_in_block (struct function
*src_cfun
,
6718 basic_block bb
, eh_region region
)
6720 gimple_stmt_iterator si
;
6722 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6724 gimple stmt
= gsi_stmt (si
);
6725 eh_region stmt_region
;
6728 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
6729 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
6733 region
= stmt_region
;
6734 else if (stmt_region
!= region
)
6736 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
6737 gcc_assert (region
!= NULL
);
6746 new_label_mapper (tree decl
, void *data
)
6748 htab_t hash
= (htab_t
) data
;
6752 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
6754 m
= XNEW (struct tree_map
);
6755 m
->hash
= DECL_UID (decl
);
6756 m
->base
.from
= decl
;
6757 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
6758 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
6759 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
6760 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
6762 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
6763 gcc_assert (*slot
== NULL
);
6770 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6774 replace_block_vars_by_duplicates (tree block
, struct pointer_map_t
*vars_map
,
6779 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
6782 if (TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != CONST_DECL
)
6784 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
6787 if (TREE_CODE (*tp
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (*tp
))
6789 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (*tp
));
6790 DECL_HAS_VALUE_EXPR_P (t
) = 1;
6792 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
6797 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
6798 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
6801 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6805 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
6808 /* Discard it from the old loop array. */
6809 (*get_loops (fn1
))[loop
->num
] = NULL
;
6811 /* Place it in the new loop array, assigning it a new number. */
6812 loop
->num
= number_of_loops (fn2
);
6813 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
6815 /* Recurse to children. */
6816 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
6817 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
6820 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6821 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6822 single basic block in the original CFG and the new basic block is
6823 returned. DEST_CFUN must not have a CFG yet.
6825 Note that the region need not be a pure SESE region. Blocks inside
6826 the region may contain calls to abort/exit. The only restriction
6827 is that ENTRY_BB should be the only entry point and it must
6830 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6831 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6832 to the new function.
6834 All local variables referenced in the region are assumed to be in
6835 the corresponding BLOCK_VARS and unexpanded variable lists
6836 associated with DEST_CFUN. */
6839 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
6840 basic_block exit_bb
, tree orig_block
)
6842 vec
<basic_block
> bbs
, dom_bbs
;
6843 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
6844 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
6845 struct function
*saved_cfun
= cfun
;
6846 int *entry_flag
, *exit_flag
;
6847 unsigned *entry_prob
, *exit_prob
;
6848 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
6851 htab_t new_label_map
;
6852 struct pointer_map_t
*vars_map
, *eh_map
;
6853 struct loop
*loop
= entry_bb
->loop_father
;
6854 struct loop
*loop0
= get_loop (saved_cfun
, 0);
6855 struct move_stmt_d d
;
6857 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6859 gcc_assert (entry_bb
!= exit_bb
6861 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
6863 /* Collect all the blocks in the region. Manually add ENTRY_BB
6864 because it won't be added by dfs_enumerate_from. */
6866 bbs
.safe_push (entry_bb
);
6867 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
6869 /* The blocks that used to be dominated by something in BBS will now be
6870 dominated by the new block. */
6871 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
6875 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6876 the predecessor edges to ENTRY_BB and the successor edges to
6877 EXIT_BB so that we can re-attach them to the new basic block that
6878 will replace the region. */
6879 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
6880 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
6881 entry_flag
= XNEWVEC (int, num_entry_edges
);
6882 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
6884 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
6886 entry_prob
[i
] = e
->probability
;
6887 entry_flag
[i
] = e
->flags
;
6888 entry_pred
[i
++] = e
->src
;
6894 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
6895 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
6896 exit_flag
= XNEWVEC (int, num_exit_edges
);
6897 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
6899 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
6901 exit_prob
[i
] = e
->probability
;
6902 exit_flag
[i
] = e
->flags
;
6903 exit_succ
[i
++] = e
->dest
;
6915 /* Switch context to the child function to initialize DEST_FN's CFG. */
6916 gcc_assert (dest_cfun
->cfg
== NULL
);
6917 push_cfun (dest_cfun
);
6919 init_empty_tree_cfg ();
6921 /* Initialize EH information for the new function. */
6923 new_label_map
= NULL
;
6926 eh_region region
= NULL
;
6928 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6929 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
6931 init_eh_for_function ();
6934 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
6935 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
6936 new_label_mapper
, new_label_map
);
6940 /* Initialize an empty loop tree. */
6941 struct loops
*loops
= ggc_cleared_alloc
<struct loops
> ();
6942 init_loops_structure (dest_cfun
, loops
, 1);
6943 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
6944 set_loops_for_fn (dest_cfun
, loops
);
6946 /* Move the outlined loop tree part. */
6947 num_nodes
= bbs
.length ();
6948 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6950 if (bb
->loop_father
->header
== bb
)
6952 struct loop
*this_loop
= bb
->loop_father
;
6953 struct loop
*outer
= loop_outer (this_loop
);
6955 /* If the SESE region contains some bbs ending with
6956 a noreturn call, those are considered to belong
6957 to the outermost loop in saved_cfun, rather than
6958 the entry_bb's loop_father. */
6962 num_nodes
-= this_loop
->num_nodes
;
6963 flow_loop_tree_node_remove (bb
->loop_father
);
6964 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
6965 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
6968 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
6971 /* Remove loop exits from the outlined region. */
6972 if (loops_for_fn (saved_cfun
)->exits
)
6973 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6975 void **slot
= htab_find_slot_with_hash
6976 (loops_for_fn (saved_cfun
)->exits
, e
,
6977 htab_hash_pointer (e
), NO_INSERT
);
6979 htab_clear_slot (loops_for_fn (saved_cfun
)->exits
, slot
);
6984 /* Adjust the number of blocks in the tree root of the outlined part. */
6985 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
6987 /* Setup a mapping to be used by move_block_to_fn. */
6988 loop
->aux
= current_loops
->tree_root
;
6989 loop0
->aux
= current_loops
->tree_root
;
6993 /* Move blocks from BBS into DEST_CFUN. */
6994 gcc_assert (bbs
.length () >= 2);
6995 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
6996 vars_map
= pointer_map_create ();
6998 memset (&d
, 0, sizeof (d
));
6999 d
.orig_block
= orig_block
;
7000 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
7001 d
.from_context
= cfun
->decl
;
7002 d
.to_context
= dest_cfun
->decl
;
7003 d
.vars_map
= vars_map
;
7004 d
.new_label_map
= new_label_map
;
7006 d
.remap_decls_p
= true;
7008 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7010 /* No need to update edge counts on the last block. It has
7011 already been updated earlier when we detached the region from
7012 the original CFG. */
7013 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
7019 /* Loop sizes are no longer correct, fix them up. */
7020 loop
->num_nodes
-= num_nodes
;
7021 for (struct loop
*outer
= loop_outer (loop
);
7022 outer
; outer
= loop_outer (outer
))
7023 outer
->num_nodes
-= num_nodes
;
7024 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
7026 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vectorize_loops
)
7029 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
7034 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
7036 dest_cfun
->has_simduid_loops
= true;
7038 if (aloop
->force_vectorize
)
7039 dest_cfun
->has_force_vectorize_loops
= true;
7043 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7047 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7049 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7050 = BLOCK_SUBBLOCKS (orig_block
);
7051 for (block
= BLOCK_SUBBLOCKS (orig_block
);
7052 block
; block
= BLOCK_CHAIN (block
))
7053 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
7054 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
7057 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
7058 vars_map
, dest_cfun
->decl
);
7061 htab_delete (new_label_map
);
7063 pointer_map_destroy (eh_map
);
7064 pointer_map_destroy (vars_map
);
7066 /* Rewire the entry and exit blocks. The successor to the entry
7067 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7068 the child function. Similarly, the predecessor of DEST_FN's
7069 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7070 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7071 various CFG manipulation function get to the right CFG.
7073 FIXME, this is silly. The CFG ought to become a parameter to
7075 push_cfun (dest_cfun
);
7076 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
7078 make_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
7081 /* Back in the original function, the SESE region has disappeared,
7082 create a new basic block in its place. */
7083 bb
= create_empty_bb (entry_pred
[0]);
7085 add_bb_to_loop (bb
, loop
);
7086 for (i
= 0; i
< num_entry_edges
; i
++)
7088 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
7089 e
->probability
= entry_prob
[i
];
7092 for (i
= 0; i
< num_exit_edges
; i
++)
7094 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
7095 e
->probability
= exit_prob
[i
];
7098 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
7099 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
7100 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
7118 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7122 dump_function_to_file (tree fndecl
, FILE *file
, int flags
)
7124 tree arg
, var
, old_current_fndecl
= current_function_decl
;
7125 struct function
*dsf
;
7126 bool ignore_topmost_bind
= false, any_var
= false;
7129 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
7130 && decl_is_tm_clone (fndecl
));
7131 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
7133 current_function_decl
= fndecl
;
7134 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
7136 arg
= DECL_ARGUMENTS (fndecl
);
7139 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
7140 fprintf (file
, " ");
7141 print_generic_expr (file
, arg
, dump_flags
);
7142 if (flags
& TDF_VERBOSE
)
7143 print_node (file
, "", arg
, 4);
7144 if (DECL_CHAIN (arg
))
7145 fprintf (file
, ", ");
7146 arg
= DECL_CHAIN (arg
);
7148 fprintf (file
, ")\n");
7150 if (flags
& TDF_VERBOSE
)
7151 print_node (file
, "", fndecl
, 2);
7153 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7154 if (dsf
&& (flags
& TDF_EH
))
7155 dump_eh_tree (file
, dsf
);
7157 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7159 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7160 current_function_decl
= old_current_fndecl
;
7164 /* When GIMPLE is lowered, the variables are no longer available in
7165 BIND_EXPRs, so display them separately. */
7166 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7169 ignore_topmost_bind
= true;
7171 fprintf (file
, "{\n");
7172 if (!vec_safe_is_empty (fun
->local_decls
))
7173 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7175 print_generic_decl (file
, var
, flags
);
7176 if (flags
& TDF_VERBOSE
)
7177 print_node (file
, "", var
, 4);
7178 fprintf (file
, "\n");
7182 if (gimple_in_ssa_p (cfun
))
7183 for (ix
= 1; ix
< num_ssa_names
; ++ix
)
7185 tree name
= ssa_name (ix
);
7186 if (name
&& !SSA_NAME_VAR (name
))
7188 fprintf (file
, " ");
7189 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7190 fprintf (file
, " ");
7191 print_generic_expr (file
, name
, flags
);
7192 fprintf (file
, ";\n");
7199 if (fun
&& fun
->decl
== fndecl
7201 && basic_block_info_for_fn (fun
))
7203 /* If the CFG has been built, emit a CFG-based dump. */
7204 if (!ignore_topmost_bind
)
7205 fprintf (file
, "{\n");
7207 if (any_var
&& n_basic_blocks_for_fn (fun
))
7208 fprintf (file
, "\n");
7210 FOR_EACH_BB_FN (bb
, fun
)
7211 dump_bb (file
, bb
, 2, flags
| TDF_COMMENT
);
7213 fprintf (file
, "}\n");
7215 else if (DECL_SAVED_TREE (fndecl
) == NULL
)
7217 /* The function is now in GIMPLE form but the CFG has not been
7218 built yet. Emit the single sequence of GIMPLE statements
7219 that make up its body. */
7220 gimple_seq body
= gimple_body (fndecl
);
7222 if (gimple_seq_first_stmt (body
)
7223 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7224 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7225 print_gimple_seq (file
, body
, 0, flags
);
7228 if (!ignore_topmost_bind
)
7229 fprintf (file
, "{\n");
7232 fprintf (file
, "\n");
7234 print_gimple_seq (file
, body
, 2, flags
);
7235 fprintf (file
, "}\n");
7242 /* Make a tree based dump. */
7243 chain
= DECL_SAVED_TREE (fndecl
);
7244 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7246 if (ignore_topmost_bind
)
7248 chain
= BIND_EXPR_BODY (chain
);
7256 if (!ignore_topmost_bind
)
7257 fprintf (file
, "{\n");
7262 fprintf (file
, "\n");
7264 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7265 if (ignore_topmost_bind
)
7266 fprintf (file
, "}\n");
7269 if (flags
& TDF_ENUMERATE_LOCALS
)
7270 dump_enumerated_decls (file
, flags
);
7271 fprintf (file
, "\n\n");
7273 current_function_decl
= old_current_fndecl
;
7276 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7279 debug_function (tree fn
, int flags
)
7281 dump_function_to_file (fn
, stderr
, flags
);
7285 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7288 print_pred_bbs (FILE *file
, basic_block bb
)
7293 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7294 fprintf (file
, "bb_%d ", e
->src
->index
);
7298 /* Print on FILE the indexes for the successors of basic_block BB. */
7301 print_succ_bbs (FILE *file
, basic_block bb
)
7306 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7307 fprintf (file
, "bb_%d ", e
->dest
->index
);
7310 /* Print to FILE the basic block BB following the VERBOSITY level. */
7313 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7315 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7316 memset ((void *) s_indent
, ' ', (size_t) indent
);
7317 s_indent
[indent
] = '\0';
7319 /* Print basic_block's header. */
7322 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7323 print_pred_bbs (file
, bb
);
7324 fprintf (file
, "}, succs = {");
7325 print_succ_bbs (file
, bb
);
7326 fprintf (file
, "})\n");
7329 /* Print basic_block's body. */
7332 fprintf (file
, "%s {\n", s_indent
);
7333 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
7334 fprintf (file
, "%s }\n", s_indent
);
7338 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
7340 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7341 VERBOSITY level this outputs the contents of the loop, or just its
7345 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
7353 s_indent
= (char *) alloca ((size_t) indent
+ 1);
7354 memset ((void *) s_indent
, ' ', (size_t) indent
);
7355 s_indent
[indent
] = '\0';
7357 /* Print loop's header. */
7358 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
7360 fprintf (file
, "header = %d", loop
->header
->index
);
7363 fprintf (file
, "deleted)\n");
7367 fprintf (file
, ", latch = %d", loop
->latch
->index
);
7369 fprintf (file
, ", multiple latches");
7370 fprintf (file
, ", niter = ");
7371 print_generic_expr (file
, loop
->nb_iterations
, 0);
7373 if (loop
->any_upper_bound
)
7375 fprintf (file
, ", upper_bound = ");
7376 print_decu (loop
->nb_iterations_upper_bound
, file
);
7379 if (loop
->any_estimate
)
7381 fprintf (file
, ", estimate = ");
7382 print_decu (loop
->nb_iterations_estimate
, file
);
7384 fprintf (file
, ")\n");
7386 /* Print loop's body. */
7389 fprintf (file
, "%s{\n", s_indent
);
7390 FOR_EACH_BB_FN (bb
, cfun
)
7391 if (bb
->loop_father
== loop
)
7392 print_loops_bb (file
, bb
, indent
, verbosity
);
7394 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
7395 fprintf (file
, "%s}\n", s_indent
);
7399 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7400 spaces. Following VERBOSITY level this outputs the contents of the
7401 loop, or just its structure. */
7404 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
7410 print_loop (file
, loop
, indent
, verbosity
);
7411 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
7414 /* Follow a CFG edge from the entry point of the program, and on entry
7415 of a loop, pretty print the loop structure on FILE. */
7418 print_loops (FILE *file
, int verbosity
)
7422 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
7423 if (bb
&& bb
->loop_father
)
7424 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
7430 debug (struct loop
&ref
)
7432 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
7436 debug (struct loop
*ptr
)
7441 fprintf (stderr
, "<nil>\n");
7444 /* Dump a loop verbosely. */
7447 debug_verbose (struct loop
&ref
)
7449 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
7453 debug_verbose (struct loop
*ptr
)
7458 fprintf (stderr
, "<nil>\n");
7462 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7465 debug_loops (int verbosity
)
7467 print_loops (stderr
, verbosity
);
7470 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7473 debug_loop (struct loop
*loop
, int verbosity
)
7475 print_loop (stderr
, loop
, 0, verbosity
);
7478 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7482 debug_loop_num (unsigned num
, int verbosity
)
7484 debug_loop (get_loop (cfun
, num
), verbosity
);
7487 /* Return true if BB ends with a call, possibly followed by some
7488 instructions that must stay with the call. Return false,
7492 gimple_block_ends_with_call_p (basic_block bb
)
7494 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7495 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
7499 /* Return true if BB ends with a conditional branch. Return false,
7503 gimple_block_ends_with_condjump_p (const_basic_block bb
)
7505 gimple stmt
= last_stmt (CONST_CAST_BB (bb
));
7506 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
7510 /* Return true if we need to add fake edge to exit at statement T.
7511 Helper function for gimple_flow_call_edges_add. */
7514 need_fake_edge_p (gimple t
)
7516 tree fndecl
= NULL_TREE
;
7519 /* NORETURN and LONGJMP calls already have an edge to exit.
7520 CONST and PURE calls do not need one.
7521 We don't currently check for CONST and PURE here, although
7522 it would be a good idea, because those attributes are
7523 figured out from the RTL in mark_constant_function, and
7524 the counter incrementation code from -fprofile-arcs
7525 leads to different results from -fbranch-probabilities. */
7526 if (is_gimple_call (t
))
7528 fndecl
= gimple_call_fndecl (t
);
7529 call_flags
= gimple_call_flags (t
);
7532 if (is_gimple_call (t
)
7534 && DECL_BUILT_IN (fndecl
)
7535 && (call_flags
& ECF_NOTHROW
)
7536 && !(call_flags
& ECF_RETURNS_TWICE
)
7537 /* fork() doesn't really return twice, but the effect of
7538 wrapping it in __gcov_fork() which calls __gcov_flush()
7539 and clears the counters before forking has the same
7540 effect as returning twice. Force a fake edge. */
7541 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7542 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
7545 if (is_gimple_call (t
))
7551 if (!(call_flags
& ECF_NORETURN
))
7555 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7556 if ((e
->flags
& EDGE_FAKE
) == 0)
7560 if (gimple_code (t
) == GIMPLE_ASM
7561 && (gimple_asm_volatile_p (t
) || gimple_asm_input_p (t
)))
7568 /* Add fake edges to the function exit for any non constant and non
7569 noreturn calls (or noreturn calls with EH/abnormal edges),
7570 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7571 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7574 The goal is to expose cases in which entering a basic block does
7575 not imply that all subsequent instructions must be executed. */
7578 gimple_flow_call_edges_add (sbitmap blocks
)
7581 int blocks_split
= 0;
7582 int last_bb
= last_basic_block_for_fn (cfun
);
7583 bool check_last_block
= false;
7585 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
7589 check_last_block
= true;
7591 check_last_block
= bitmap_bit_p (blocks
,
7592 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
7594 /* In the last basic block, before epilogue generation, there will be
7595 a fallthru edge to EXIT. Special care is required if the last insn
7596 of the last basic block is a call because make_edge folds duplicate
7597 edges, which would result in the fallthru edge also being marked
7598 fake, which would result in the fallthru edge being removed by
7599 remove_fake_edges, which would result in an invalid CFG.
7601 Moreover, we can't elide the outgoing fake edge, since the block
7602 profiler needs to take this into account in order to solve the minimal
7603 spanning tree in the case that the call doesn't return.
7605 Handle this by adding a dummy instruction in a new last basic block. */
7606 if (check_last_block
)
7608 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
7609 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7612 if (!gsi_end_p (gsi
))
7615 if (t
&& need_fake_edge_p (t
))
7619 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7622 gsi_insert_on_edge (e
, gimple_build_nop ());
7623 gsi_commit_edge_inserts ();
7628 /* Now add fake edges to the function exit for any non constant
7629 calls since there is no way that we can determine if they will
7631 for (i
= 0; i
< last_bb
; i
++)
7633 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7634 gimple_stmt_iterator gsi
;
7635 gimple stmt
, last_stmt
;
7640 if (blocks
&& !bitmap_bit_p (blocks
, i
))
7643 gsi
= gsi_last_nondebug_bb (bb
);
7644 if (!gsi_end_p (gsi
))
7646 last_stmt
= gsi_stmt (gsi
);
7649 stmt
= gsi_stmt (gsi
);
7650 if (need_fake_edge_p (stmt
))
7654 /* The handling above of the final block before the
7655 epilogue should be enough to verify that there is
7656 no edge to the exit block in CFG already.
7657 Calling make_edge in such case would cause us to
7658 mark that edge as fake and remove it later. */
7659 #ifdef ENABLE_CHECKING
7660 if (stmt
== last_stmt
)
7662 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7663 gcc_assert (e
== NULL
);
7667 /* Note that the following may create a new basic block
7668 and renumber the existing basic blocks. */
7669 if (stmt
!= last_stmt
)
7671 e
= split_block (bb
, stmt
);
7675 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
7679 while (!gsi_end_p (gsi
));
7684 verify_flow_info ();
7686 return blocks_split
;
7689 /* Removes edge E and all the blocks dominated by it, and updates dominance
7690 information. The IL in E->src needs to be updated separately.
7691 If dominance info is not available, only the edge E is removed.*/
7694 remove_edge_and_dominated_blocks (edge e
)
7696 vec
<basic_block
> bbs_to_remove
= vNULL
;
7697 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
7701 bool none_removed
= false;
7703 basic_block bb
, dbb
;
7706 if (!dom_info_available_p (CDI_DOMINATORS
))
7712 /* No updating is needed for edges to exit. */
7713 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7715 if (cfgcleanup_altered_bbs
)
7716 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7721 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7722 that is not dominated by E->dest, then this set is empty. Otherwise,
7723 all the basic blocks dominated by E->dest are removed.
7725 Also, to DF_IDOM we store the immediate dominators of the blocks in
7726 the dominance frontier of E (i.e., of the successors of the
7727 removed blocks, if there are any, and of E->dest otherwise). */
7728 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
7733 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
7735 none_removed
= true;
7740 df
= BITMAP_ALLOC (NULL
);
7741 df_idom
= BITMAP_ALLOC (NULL
);
7744 bitmap_set_bit (df_idom
,
7745 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
7748 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
7749 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7751 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
7753 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
7754 bitmap_set_bit (df
, f
->dest
->index
);
7757 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7758 bitmap_clear_bit (df
, bb
->index
);
7760 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
7762 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7763 bitmap_set_bit (df_idom
,
7764 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
7768 if (cfgcleanup_altered_bbs
)
7770 /* Record the set of the altered basic blocks. */
7771 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7772 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
7775 /* Remove E and the cancelled blocks. */
7780 /* Walk backwards so as to get a chance to substitute all
7781 released DEFs into debug stmts. See
7782 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7784 for (i
= bbs_to_remove
.length (); i
-- > 0; )
7785 delete_basic_block (bbs_to_remove
[i
]);
7788 /* Update the dominance information. The immediate dominator may change only
7789 for blocks whose immediate dominator belongs to DF_IDOM:
7791 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7792 removal. Let Z the arbitrary block such that idom(Z) = Y and
7793 Z dominates X after the removal. Before removal, there exists a path P
7794 from Y to X that avoids Z. Let F be the last edge on P that is
7795 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7796 dominates W, and because of P, Z does not dominate W), and W belongs to
7797 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7798 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
7800 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7801 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
7803 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
7804 bbs_to_fix_dom
.safe_push (dbb
);
7807 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
7810 BITMAP_FREE (df_idom
);
7811 bbs_to_remove
.release ();
7812 bbs_to_fix_dom
.release ();
7815 /* Purge dead EH edges from basic block BB. */
7818 gimple_purge_dead_eh_edges (basic_block bb
)
7820 bool changed
= false;
7823 gimple stmt
= last_stmt (bb
);
7825 if (stmt
&& stmt_can_throw_internal (stmt
))
7828 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7830 if (e
->flags
& EDGE_EH
)
7832 remove_edge_and_dominated_blocks (e
);
7842 /* Purge dead EH edges from basic block listed in BLOCKS. */
7845 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
7847 bool changed
= false;
7851 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7853 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7855 /* Earlier gimple_purge_dead_eh_edges could have removed
7856 this basic block already. */
7857 gcc_assert (bb
|| changed
);
7859 changed
|= gimple_purge_dead_eh_edges (bb
);
7865 /* Purge dead abnormal call edges from basic block BB. */
7868 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
7870 bool changed
= false;
7873 gimple stmt
= last_stmt (bb
);
7875 if (!cfun
->has_nonlocal_label
7876 && !cfun
->calls_setjmp
)
7879 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
7882 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7884 if (e
->flags
& EDGE_ABNORMAL
)
7886 if (e
->flags
& EDGE_FALLTHRU
)
7887 e
->flags
&= ~EDGE_ABNORMAL
;
7889 remove_edge_and_dominated_blocks (e
);
7899 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7902 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
7904 bool changed
= false;
7908 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7910 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7912 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7913 this basic block already. */
7914 gcc_assert (bb
|| changed
);
7916 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
7922 /* This function is called whenever a new edge is created or
7926 gimple_execute_on_growing_pred (edge e
)
7928 basic_block bb
= e
->dest
;
7930 if (!gimple_seq_empty_p (phi_nodes (bb
)))
7931 reserve_phi_args_for_new_edge (bb
);
7934 /* This function is called immediately before edge E is removed from
7935 the edge vector E->dest->preds. */
7938 gimple_execute_on_shrinking_pred (edge e
)
7940 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
7941 remove_phi_args (e
);
7944 /*---------------------------------------------------------------------------
7945 Helper functions for Loop versioning
7946 ---------------------------------------------------------------------------*/
7948 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7949 of 'first'. Both of them are dominated by 'new_head' basic block. When
7950 'new_head' was created by 'second's incoming edge it received phi arguments
7951 on the edge by split_edge(). Later, additional edge 'e' was created to
7952 connect 'new_head' and 'first'. Now this routine adds phi args on this
7953 additional edge 'e' that new_head to second edge received as part of edge
7957 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
7958 basic_block new_head
, edge e
)
7961 gimple_stmt_iterator psi1
, psi2
;
7963 edge e2
= find_edge (new_head
, second
);
7965 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7966 edge, we should always have an edge from NEW_HEAD to SECOND. */
7967 gcc_assert (e2
!= NULL
);
7969 /* Browse all 'second' basic block phi nodes and add phi args to
7970 edge 'e' for 'first' head. PHI args are always in correct order. */
7972 for (psi2
= gsi_start_phis (second
),
7973 psi1
= gsi_start_phis (first
);
7974 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
7975 gsi_next (&psi2
), gsi_next (&psi1
))
7977 phi1
= gsi_stmt (psi1
);
7978 phi2
= gsi_stmt (psi2
);
7979 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
7980 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
7985 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7986 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7987 the destination of the ELSE part. */
7990 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
7991 basic_block second_head ATTRIBUTE_UNUSED
,
7992 basic_block cond_bb
, void *cond_e
)
7994 gimple_stmt_iterator gsi
;
7995 gimple new_cond_expr
;
7996 tree cond_expr
= (tree
) cond_e
;
7999 /* Build new conditional expr */
8000 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
8001 NULL_TREE
, NULL_TREE
);
8003 /* Add new cond in cond_bb. */
8004 gsi
= gsi_last_bb (cond_bb
);
8005 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
8007 /* Adjust edges appropriately to connect new head with first head
8008 as well as second head. */
8009 e0
= single_succ_edge (cond_bb
);
8010 e0
->flags
&= ~EDGE_FALLTHRU
;
8011 e0
->flags
|= EDGE_FALSE_VALUE
;
8015 /* Do book-keeping of basic block BB for the profile consistency checker.
8016 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8017 then do post-pass accounting. Store the counting in RECORD. */
8019 gimple_account_profile_record (basic_block bb
, int after_pass
,
8020 struct profile_record
*record
)
8022 gimple_stmt_iterator i
;
8023 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
8025 record
->size
[after_pass
]
8026 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
8027 if (profile_status_for_fn (cfun
) == PROFILE_READ
)
8028 record
->time
[after_pass
]
8029 += estimate_num_insns (gsi_stmt (i
),
8030 &eni_time_weights
) * bb
->count
;
8031 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
8032 record
->time
[after_pass
]
8033 += estimate_num_insns (gsi_stmt (i
),
8034 &eni_time_weights
) * bb
->frequency
;
8038 struct cfg_hooks gimple_cfg_hooks
= {
8040 gimple_verify_flow_info
,
8041 gimple_dump_bb
, /* dump_bb */
8042 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
8043 create_bb
, /* create_basic_block */
8044 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
8045 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
8046 gimple_can_remove_branch_p
, /* can_remove_branch_p */
8047 remove_bb
, /* delete_basic_block */
8048 gimple_split_block
, /* split_block */
8049 gimple_move_block_after
, /* move_block_after */
8050 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
8051 gimple_merge_blocks
, /* merge_blocks */
8052 gimple_predict_edge
, /* predict_edge */
8053 gimple_predicted_by_p
, /* predicted_by_p */
8054 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
8055 gimple_duplicate_bb
, /* duplicate_block */
8056 gimple_split_edge
, /* split_edge */
8057 gimple_make_forwarder_block
, /* make_forward_block */
8058 NULL
, /* tidy_fallthru_edge */
8059 NULL
, /* force_nonfallthru */
8060 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
8061 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
8062 gimple_flow_call_edges_add
, /* flow_call_edges_add */
8063 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
8064 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
8065 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
8066 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
8067 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
8068 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
8069 flush_pending_stmts
, /* flush_pending_stmts */
8070 gimple_empty_block_p
, /* block_empty_p */
8071 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
8072 gimple_account_profile_record
,
8076 /* Split all critical edges. */
8079 split_critical_edges (void)
8085 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8086 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8087 mappings around the calls to split_edge. */
8088 start_recording_case_labels ();
8089 FOR_ALL_BB_FN (bb
, cfun
)
8091 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8093 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
8095 /* PRE inserts statements to edges and expects that
8096 since split_critical_edges was done beforehand, committing edge
8097 insertions will not split more edges. In addition to critical
8098 edges we must split edges that have multiple successors and
8099 end by control flow statements, such as RESX.
8100 Go ahead and split them too. This matches the logic in
8101 gimple_find_edge_insert_loc. */
8102 else if ((!single_pred_p (e
->dest
)
8103 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
8104 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8105 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
8106 && !(e
->flags
& EDGE_ABNORMAL
))
8108 gimple_stmt_iterator gsi
;
8110 gsi
= gsi_last_bb (e
->src
);
8111 if (!gsi_end_p (gsi
)
8112 && stmt_ends_bb_p (gsi_stmt (gsi
))
8113 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
8114 && !gimple_call_builtin_p (gsi_stmt (gsi
),
8120 end_recording_case_labels ();
8126 const pass_data pass_data_split_crit_edges
=
8128 GIMPLE_PASS
, /* type */
8129 "crited", /* name */
8130 OPTGROUP_NONE
, /* optinfo_flags */
8131 TV_TREE_SPLIT_EDGES
, /* tv_id */
8132 PROP_cfg
, /* properties_required */
8133 PROP_no_crit_edges
, /* properties_provided */
8134 0, /* properties_destroyed */
8135 0, /* todo_flags_start */
8136 0, /* todo_flags_finish */
8139 class pass_split_crit_edges
: public gimple_opt_pass
8142 pass_split_crit_edges (gcc::context
*ctxt
)
8143 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
8146 /* opt_pass methods: */
8147 virtual unsigned int execute (function
*) { return split_critical_edges (); }
8149 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8150 }; // class pass_split_crit_edges
8155 make_pass_split_crit_edges (gcc::context
*ctxt
)
8157 return new pass_split_crit_edges (ctxt
);
8161 /* Build a ternary operation and gimplify it. Emit code before GSI.
8162 Return the gimple_val holding the result. */
8165 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8166 tree type
, tree a
, tree b
, tree c
)
8169 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8171 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8174 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8178 /* Build a binary operation and gimplify it. Emit code before GSI.
8179 Return the gimple_val holding the result. */
8182 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8183 tree type
, tree a
, tree b
)
8187 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8190 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8194 /* Build a unary operation and gimplify it. Emit code before GSI.
8195 Return the gimple_val holding the result. */
8198 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8203 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8206 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8212 /* Given a basic block B which ends with a conditional and has
8213 precisely two successors, determine which of the edges is taken if
8214 the conditional is true and which is taken if the conditional is
8215 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8218 extract_true_false_edges_from_block (basic_block b
,
8222 edge e
= EDGE_SUCC (b
, 0);
8224 if (e
->flags
& EDGE_TRUE_VALUE
)
8227 *false_edge
= EDGE_SUCC (b
, 1);
8232 *true_edge
= EDGE_SUCC (b
, 1);
8236 /* Emit return warnings. */
8240 const pass_data pass_data_warn_function_return
=
8242 GIMPLE_PASS
, /* type */
8243 "*warn_function_return", /* name */
8244 OPTGROUP_NONE
, /* optinfo_flags */
8245 TV_NONE
, /* tv_id */
8246 PROP_cfg
, /* properties_required */
8247 0, /* properties_provided */
8248 0, /* properties_destroyed */
8249 0, /* todo_flags_start */
8250 0, /* todo_flags_finish */
8253 class pass_warn_function_return
: public gimple_opt_pass
8256 pass_warn_function_return (gcc::context
*ctxt
)
8257 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
8260 /* opt_pass methods: */
8261 virtual unsigned int execute (function
*);
8263 }; // class pass_warn_function_return
8266 pass_warn_function_return::execute (function
*fun
)
8268 source_location location
;
8273 if (!targetm
.warn_func_return (fun
->decl
))
8276 /* If we have a path to EXIT, then we do return. */
8277 if (TREE_THIS_VOLATILE (fun
->decl
)
8278 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
8280 location
= UNKNOWN_LOCATION
;
8281 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8283 last
= last_stmt (e
->src
);
8284 if ((gimple_code (last
) == GIMPLE_RETURN
8285 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
8286 && (location
= gimple_location (last
)) != UNKNOWN_LOCATION
)
8289 if (location
== UNKNOWN_LOCATION
)
8290 location
= cfun
->function_end_locus
;
8291 warning_at (location
, 0, "%<noreturn%> function does return");
8294 /* If we see "return;" in some basic block, then we do reach the end
8295 without returning a value. */
8296 else if (warn_return_type
8297 && !TREE_NO_WARNING (fun
->decl
)
8298 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0
8299 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
8301 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8303 gimple last
= last_stmt (e
->src
);
8304 if (gimple_code (last
) == GIMPLE_RETURN
8305 && gimple_return_retval (last
) == NULL
8306 && !gimple_no_warning_p (last
))
8308 location
= gimple_location (last
);
8309 if (location
== UNKNOWN_LOCATION
)
8310 location
= fun
->function_end_locus
;
8311 warning_at (location
, OPT_Wreturn_type
, "control reaches end of non-void function");
8312 TREE_NO_WARNING (fun
->decl
) = 1;
8323 make_pass_warn_function_return (gcc::context
*ctxt
)
8325 return new pass_warn_function_return (ctxt
);
8328 /* Walk a gimplified function and warn for functions whose return value is
8329 ignored and attribute((warn_unused_result)) is set. This is done before
8330 inlining, so we don't have to worry about that. */
8333 do_warn_unused_result (gimple_seq seq
)
8336 gimple_stmt_iterator i
;
8338 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
8340 gimple g
= gsi_stmt (i
);
8342 switch (gimple_code (g
))
8345 do_warn_unused_result (gimple_bind_body (g
));
8348 do_warn_unused_result (gimple_try_eval (g
));
8349 do_warn_unused_result (gimple_try_cleanup (g
));
8352 do_warn_unused_result (gimple_catch_handler (g
));
8354 case GIMPLE_EH_FILTER
:
8355 do_warn_unused_result (gimple_eh_filter_failure (g
));
8359 if (gimple_call_lhs (g
))
8361 if (gimple_call_internal_p (g
))
8364 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8365 LHS. All calls whose value is ignored should be
8366 represented like this. Look for the attribute. */
8367 fdecl
= gimple_call_fndecl (g
);
8368 ftype
= gimple_call_fntype (g
);
8370 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
8372 location_t loc
= gimple_location (g
);
8375 warning_at (loc
, OPT_Wunused_result
,
8376 "ignoring return value of %qD, "
8377 "declared with attribute warn_unused_result",
8380 warning_at (loc
, OPT_Wunused_result
,
8381 "ignoring return value of function "
8382 "declared with attribute warn_unused_result");
8387 /* Not a container, not a call, or a call whose value is used. */
8395 const pass_data pass_data_warn_unused_result
=
8397 GIMPLE_PASS
, /* type */
8398 "*warn_unused_result", /* name */
8399 OPTGROUP_NONE
, /* optinfo_flags */
8400 TV_NONE
, /* tv_id */
8401 PROP_gimple_any
, /* properties_required */
8402 0, /* properties_provided */
8403 0, /* properties_destroyed */
8404 0, /* todo_flags_start */
8405 0, /* todo_flags_finish */
8408 class pass_warn_unused_result
: public gimple_opt_pass
8411 pass_warn_unused_result (gcc::context
*ctxt
)
8412 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
8415 /* opt_pass methods: */
8416 virtual bool gate (function
*) { return flag_warn_unused_result
; }
8417 virtual unsigned int execute (function
*)
8419 do_warn_unused_result (gimple_body (current_function_decl
));
8423 }; // class pass_warn_unused_result
8428 make_pass_warn_unused_result (gcc::context
*ctxt
)
8430 return new pass_warn_unused_result (ctxt
);
8433 /* IPA passes, compilation of earlier functions or inlining
8434 might have changed some properties, such as marked functions nothrow,
8435 pure, const or noreturn.
8436 Remove redundant edges and basic blocks, and create new ones if necessary.
8438 This pass can't be executed as stand alone pass from pass manager, because
8439 in between inlining and this fixup the verify_flow_info would fail. */
8442 execute_fixup_cfg (void)
8445 gimple_stmt_iterator gsi
;
8447 gcov_type count_scale
;
8452 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl
)->count
,
8453 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
);
8455 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
8456 cgraph_get_node (current_function_decl
)->count
;
8457 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
8458 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
,
8461 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
8462 e
->count
= apply_scale (e
->count
, count_scale
);
8464 FOR_EACH_BB_FN (bb
, cfun
)
8466 bb
->count
= apply_scale (bb
->count
, count_scale
);
8467 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
8469 gimple stmt
= gsi_stmt (gsi
);
8470 tree decl
= is_gimple_call (stmt
)
8471 ? gimple_call_fndecl (stmt
)
8475 int flags
= gimple_call_flags (stmt
);
8476 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
8478 if (gimple_purge_dead_abnormal_call_edges (bb
))
8479 todo
|= TODO_cleanup_cfg
;
8481 if (gimple_in_ssa_p (cfun
))
8483 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8488 if (flags
& ECF_NORETURN
8489 && fixup_noreturn_call (stmt
))
8490 todo
|= TODO_cleanup_cfg
;
8493 /* Remove stores to variables we marked write-only.
8494 Keep access when store has side effect, i.e. in case when source
8496 if (gimple_store_p (stmt
)
8497 && !gimple_has_side_effects (stmt
))
8499 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
8501 if (TREE_CODE (lhs
) == VAR_DECL
8502 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
8503 && varpool_get_node (lhs
)->writeonly
)
8505 unlink_stmt_vdef (stmt
);
8506 gsi_remove (&gsi
, true);
8507 release_defs (stmt
);
8508 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8512 /* For calls we can simply remove LHS when it is known
8513 to be write-only. */
8514 if (is_gimple_call (stmt
)
8515 && gimple_get_lhs (stmt
))
8517 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
8519 if (TREE_CODE (lhs
) == VAR_DECL
8520 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
8521 && varpool_get_node (lhs
)->writeonly
)
8523 gimple_call_set_lhs (stmt
, NULL
);
8525 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8529 if (maybe_clean_eh_stmt (stmt
)
8530 && gimple_purge_dead_eh_edges (bb
))
8531 todo
|= TODO_cleanup_cfg
;
8535 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8536 e
->count
= apply_scale (e
->count
, count_scale
);
8538 /* If we have a basic block with no successors that does not
8539 end with a control statement or a noreturn call end it with
8540 a call to __builtin_unreachable. This situation can occur
8541 when inlining a noreturn call that does in fact return. */
8542 if (EDGE_COUNT (bb
->succs
) == 0)
8544 gimple stmt
= last_stmt (bb
);
8546 || (!is_ctrl_stmt (stmt
)
8547 && (!is_gimple_call (stmt
)
8548 || (gimple_call_flags (stmt
) & ECF_NORETURN
) == 0)))
8550 stmt
= gimple_build_call
8551 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
), 0);
8552 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
8553 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
8557 if (count_scale
!= REG_BR_PROB_BASE
)
8558 compute_function_frequency ();
8560 /* We just processed all calls. */
8561 if (cfun
->gimple_df
)
8562 vec_free (MODIFIED_NORETURN_CALLS (cfun
));
8564 /* Dump a textual representation of the flowgraph. */
8566 gimple_dump_cfg (dump_file
, dump_flags
);
8569 && (todo
& TODO_cleanup_cfg
))
8570 loops_state_set (LOOPS_NEED_FIXUP
);
8577 const pass_data pass_data_fixup_cfg
=
8579 GIMPLE_PASS
, /* type */
8580 "*free_cfg_annotations", /* name */
8581 OPTGROUP_NONE
, /* optinfo_flags */
8582 TV_NONE
, /* tv_id */
8583 PROP_cfg
, /* properties_required */
8584 0, /* properties_provided */
8585 0, /* properties_destroyed */
8586 0, /* todo_flags_start */
8587 0, /* todo_flags_finish */
8590 class pass_fixup_cfg
: public gimple_opt_pass
8593 pass_fixup_cfg (gcc::context
*ctxt
)
8594 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
8597 /* opt_pass methods: */
8598 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
8599 virtual unsigned int execute (function
*) { return execute_fixup_cfg (); }
8601 }; // class pass_fixup_cfg
8606 make_pass_fixup_cfg (gcc::context
*ctxt
)
8608 return new pass_fixup_cfg (ctxt
);
8611 /* Garbage collection support for edge_def. */
8613 extern void gt_ggc_mx (tree
&);
8614 extern void gt_ggc_mx (gimple
&);
8615 extern void gt_ggc_mx (rtx
&);
8616 extern void gt_ggc_mx (basic_block
&);
8619 gt_ggc_mx (edge_def
*e
)
8621 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8623 gt_ggc_mx (e
->dest
);
8624 if (current_ir_type () == IR_GIMPLE
)
8625 gt_ggc_mx (e
->insns
.g
);
8627 gt_ggc_mx (e
->insns
.r
);
8631 /* PCH support for edge_def. */
8633 extern void gt_pch_nx (tree
&);
8634 extern void gt_pch_nx (gimple
&);
8635 extern void gt_pch_nx (rtx
&);
8636 extern void gt_pch_nx (basic_block
&);
8639 gt_pch_nx (edge_def
*e
)
8641 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8643 gt_pch_nx (e
->dest
);
8644 if (current_ir_type () == IR_GIMPLE
)
8645 gt_pch_nx (e
->insns
.g
);
8647 gt_pch_nx (e
->insns
.r
);
8652 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
8654 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8655 op (&(e
->src
), cookie
);
8656 op (&(e
->dest
), cookie
);
8657 if (current_ir_type () == IR_GIMPLE
)
8658 op (&(e
->insns
.g
), cookie
);
8660 op (&(e
->insns
.r
), cookie
);
8661 op (&(block
), cookie
);