1 /* Control flow functions for trees.
2 Copyright (C) 2001-2016 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
58 #include "tree-cfgcleanup.h"
63 /* This file contains functions for building the Control Flow Graph (CFG)
64 for a function tree. */
66 /* Local declarations. */
/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;
71 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
72 which use a particular edge. The CASE_LABEL_EXPRs are chained together
73 via their CASE_CHAIN field, which we clear after we're done with the
74 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
76 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
77 update the case vector in response to edge redirections.
79 Right now this table is set up and torn down at key points in the
80 compilation process. It would be nice if we could make the table
81 more persistent. The key is getting notification of changes to
82 the CFG (particularly edge removal, creation and redirection). */
84 static hash_map
<edge
, tree
> *edge_to_cases
;
86 /* If we record edge_to_cases, this bitmap will hold indexes
87 of basic blocks that end in a GIMPLE_SWITCH which we touched
88 due to edge manipulations. */
90 static bitmap touched_switch_bbs
;
95 long num_merged_labels
;
98 static struct cfg_stats_d cfg_stats
;
100 /* Data to pass to replace_block_vars_by_duplicates_1. */
101 struct replace_decls_d
103 hash_map
<tree
, tree
> *vars_map
;
107 /* Hash table to store last discriminator assigned for each locus. */
108 struct locus_discrim_map
114 /* Hashtable helpers. */
116 struct locus_discrim_hasher
: free_ptr_hash
<locus_discrim_map
>
118 static inline hashval_t
hash (const locus_discrim_map
*);
119 static inline bool equal (const locus_discrim_map
*,
120 const locus_discrim_map
*);
123 /* Trivial hash function for a location_t. ITEM is a pointer to
124 a hash table entry that maps a location_t to a discriminator. */
127 locus_discrim_hasher::hash (const locus_discrim_map
*item
)
129 return LOCATION_LINE (item
->locus
);
132 /* Equality function for the locus-to-discriminator map. A and B
133 point to the two hash table entries to compare. */
136 locus_discrim_hasher::equal (const locus_discrim_map
*a
,
137 const locus_discrim_map
*b
)
139 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
142 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
144 /* Basic blocks and flowgraphs. */
145 static void make_blocks (gimple_seq
);
148 static void make_edges (void);
149 static void assign_discriminators (void);
150 static void make_cond_expr_edges (basic_block
);
151 static void make_gimple_switch_edges (gswitch
*, basic_block
);
152 static bool make_goto_expr_edges (basic_block
);
153 static void make_gimple_asm_edges (basic_block
);
154 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
155 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
157 /* Various helpers. */
158 static inline bool stmt_starts_bb_p (gimple
*, gimple
*);
159 static int gimple_verify_flow_info (void);
160 static void gimple_make_forwarder_block (edge
);
161 static gimple
*first_non_label_stmt (basic_block
);
162 static bool verify_gimple_transaction (gtransaction
*);
163 static bool call_can_make_abnormal_goto (gimple
*);
165 /* Flowgraph optimization and cleanup. */
166 static void gimple_merge_blocks (basic_block
, basic_block
);
167 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
168 static void remove_bb (basic_block
);
169 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
170 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
171 static edge
find_taken_edge_switch_expr (gswitch
*, basic_block
, tree
);
172 static tree
find_case_label_for_value (gswitch
*, tree
);
175 init_empty_tree_cfg_for_function (struct function
*fn
)
177 /* Initialize the basic block array. */
179 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
180 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
181 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
182 vec_alloc (basic_block_info_for_fn (fn
), initial_cfg_capacity
);
183 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
184 initial_cfg_capacity
);
186 /* Build a mapping of labels to their associated blocks. */
187 vec_alloc (label_to_block_map_for_fn (fn
), initial_cfg_capacity
);
188 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
189 initial_cfg_capacity
);
191 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
192 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
194 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
195 = EXIT_BLOCK_PTR_FOR_FN (fn
);
196 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
197 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
201 init_empty_tree_cfg (void)
203 init_empty_tree_cfg_for_function (cfun
);
206 /*---------------------------------------------------------------------------
208 ---------------------------------------------------------------------------*/
210 /* Entry point to the CFG builder for trees. SEQ is the sequence of
211 statements to be added to the flowgraph. */
214 build_gimple_cfg (gimple_seq seq
)
216 /* Register specific gimple functions. */
217 gimple_register_cfg_hooks ();
219 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
221 init_empty_tree_cfg ();
225 /* Make sure there is always at least one block, even if it's empty. */
226 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
227 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
229 /* Adjust the size of the array. */
230 if (basic_block_info_for_fn (cfun
)->length ()
231 < (size_t) n_basic_blocks_for_fn (cfun
))
232 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
233 n_basic_blocks_for_fn (cfun
));
235 /* To speed up statement iterator walks, we first purge dead labels. */
236 cleanup_dead_labels ();
238 /* Group case nodes to reduce the number of edges.
239 We do this after cleaning up dead labels because otherwise we miss
240 a lot of obvious case merging opportunities. */
241 group_case_labels ();
243 /* Create the edges of the flowgraph. */
244 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
246 assign_discriminators ();
247 cleanup_dead_labels ();
248 delete discriminator_per_locus
;
249 discriminator_per_locus
= NULL
;
252 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
253 them and propagate the information to LOOP. We assume that the annotations
254 come immediately before the condition in BB, if any. */
257 replace_loop_annotate_in_block (basic_block bb
, struct loop
*loop
)
259 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
260 gimple
*stmt
= gsi_stmt (gsi
);
262 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
265 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
267 stmt
= gsi_stmt (gsi
);
268 if (gimple_code (stmt
) != GIMPLE_CALL
)
270 if (!gimple_call_internal_p (stmt
)
271 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
274 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
276 case annot_expr_ivdep_kind
:
277 loop
->safelen
= INT_MAX
;
279 case annot_expr_no_vector_kind
:
280 loop
->dont_vectorize
= true;
282 case annot_expr_vector_kind
:
283 loop
->force_vectorize
= true;
284 cfun
->has_force_vectorize_loops
= true;
290 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
291 gimple_call_arg (stmt
, 0));
292 gsi_replace (&gsi
, stmt
, true);
296 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
297 them and propagate the information to the loop. We assume that the
298 annotations come immediately before the condition of the loop. */
301 replace_loop_annotate (void)
305 gimple_stmt_iterator gsi
;
308 FOR_EACH_LOOP (loop
, 0)
310 /* First look into the header. */
311 replace_loop_annotate_in_block (loop
->header
, loop
);
313 /* Then look into the latch, if any. */
315 replace_loop_annotate_in_block (loop
->latch
, loop
);
318 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
319 FOR_EACH_BB_FN (bb
, cfun
)
321 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
323 stmt
= gsi_stmt (gsi
);
324 if (gimple_code (stmt
) != GIMPLE_CALL
)
326 if (!gimple_call_internal_p (stmt
)
327 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
330 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
332 case annot_expr_ivdep_kind
:
333 case annot_expr_no_vector_kind
:
334 case annot_expr_vector_kind
:
340 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
341 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
342 gimple_call_arg (stmt
, 0));
343 gsi_replace (&gsi
, stmt
, true);
350 execute_build_cfg (void)
352 gimple_seq body
= gimple_body (current_function_decl
);
354 build_gimple_cfg (body
);
355 gimple_set_body (current_function_decl
, NULL
);
356 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
358 fprintf (dump_file
, "Scope blocks:\n");
359 dump_scope_blocks (dump_file
, dump_flags
);
362 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
363 replace_loop_annotate ();
369 const pass_data pass_data_build_cfg
=
371 GIMPLE_PASS
, /* type */
373 OPTGROUP_NONE
, /* optinfo_flags */
374 TV_TREE_CFG
, /* tv_id */
375 PROP_gimple_leh
, /* properties_required */
376 ( PROP_cfg
| PROP_loops
), /* properties_provided */
377 0, /* properties_destroyed */
378 0, /* todo_flags_start */
379 0, /* todo_flags_finish */
382 class pass_build_cfg
: public gimple_opt_pass
385 pass_build_cfg (gcc::context
*ctxt
)
386 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
389 /* opt_pass methods: */
390 virtual unsigned int execute (function
*) { return execute_build_cfg (); }
392 }; // class pass_build_cfg
397 make_pass_build_cfg (gcc::context
*ctxt
)
399 return new pass_build_cfg (ctxt
);
403 /* Return true if T is a computed goto. */
406 computed_goto_p (gimple
*t
)
408 return (gimple_code (t
) == GIMPLE_GOTO
409 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
412 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
413 the other edge points to a bb with just __builtin_unreachable ().
414 I.e. return true for C->M edge in:
422 __builtin_unreachable ();
426 assert_unreachable_fallthru_edge_p (edge e
)
428 basic_block pred_bb
= e
->src
;
429 gimple
*last
= last_stmt (pred_bb
);
430 if (last
&& gimple_code (last
) == GIMPLE_COND
)
432 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
433 if (other_bb
== e
->dest
)
434 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
435 if (EDGE_COUNT (other_bb
->succs
) == 0)
437 gimple_stmt_iterator gsi
= gsi_after_labels (other_bb
);
442 stmt
= gsi_stmt (gsi
);
443 while (is_gimple_debug (stmt
) || gimple_clobber_p (stmt
))
448 stmt
= gsi_stmt (gsi
);
450 return gimple_call_builtin_p (stmt
, BUILT_IN_UNREACHABLE
);
457 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
458 could alter control flow except via eh. We initialize the flag at
459 CFG build time and only ever clear it later. */
462 gimple_call_initialize_ctrl_altering (gimple
*stmt
)
464 int flags
= gimple_call_flags (stmt
);
466 /* A call alters control flow if it can make an abnormal goto. */
467 if (call_can_make_abnormal_goto (stmt
)
468 /* A call also alters control flow if it does not return. */
469 || flags
& ECF_NORETURN
470 /* TM ending statements have backedges out of the transaction.
471 Return true so we split the basic block containing them.
472 Note that the TM_BUILTIN test is merely an optimization. */
473 || ((flags
& ECF_TM_BUILTIN
)
474 && is_tm_ending_fndecl (gimple_call_fndecl (stmt
)))
475 /* BUILT_IN_RETURN call is same as return statement. */
476 || gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
)
477 /* IFN_UNIQUE should be the last insn, to make checking for it
478 as cheap as possible. */
479 || (gimple_call_internal_p (stmt
)
480 && gimple_call_internal_unique_p (stmt
)))
481 gimple_call_set_ctrl_altering (stmt
, true);
483 gimple_call_set_ctrl_altering (stmt
, false);
487 /* Insert SEQ after BB and build a flowgraph. */
490 make_blocks_1 (gimple_seq seq
, basic_block bb
)
492 gimple_stmt_iterator i
= gsi_start (seq
);
494 bool start_new_block
= true;
495 bool first_stmt_of_seq
= true;
497 while (!gsi_end_p (i
))
504 if (stmt
&& is_gimple_call (stmt
))
505 gimple_call_initialize_ctrl_altering (stmt
);
507 /* If the statement starts a new basic block or if we have determined
508 in a previous pass that we need to create a new block for STMT, do
510 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
512 if (!first_stmt_of_seq
)
513 gsi_split_seq_before (&i
, &seq
);
514 bb
= create_basic_block (seq
, bb
);
515 start_new_block
= false;
518 /* Now add STMT to BB and create the subgraphs for special statement
520 gimple_set_bb (stmt
, bb
);
522 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
524 if (stmt_ends_bb_p (stmt
))
526 /* If the stmt can make abnormal goto use a new temporary
527 for the assignment to the LHS. This makes sure the old value
528 of the LHS is available on the abnormal edge. Otherwise
529 we will end up with overlapping life-ranges for abnormal
531 if (gimple_has_lhs (stmt
)
532 && stmt_can_make_abnormal_goto (stmt
)
533 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
535 tree lhs
= gimple_get_lhs (stmt
);
536 tree tmp
= create_tmp_var (TREE_TYPE (lhs
));
537 gimple
*s
= gimple_build_assign (lhs
, tmp
);
538 gimple_set_location (s
, gimple_location (stmt
));
539 gimple_set_block (s
, gimple_block (stmt
));
540 gimple_set_lhs (stmt
, tmp
);
541 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
542 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
543 DECL_GIMPLE_REG_P (tmp
) = 1;
544 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
546 start_new_block
= true;
550 first_stmt_of_seq
= false;
555 /* Build a flowgraph for the sequence of stmts SEQ. */
558 make_blocks (gimple_seq seq
)
560 make_blocks_1 (seq
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
563 /* Create and return a new empty basic block after bb AFTER. */
566 create_bb (void *h
, void *e
, basic_block after
)
572 /* Create and initialize a new basic block. Since alloc_block uses
573 GC allocation that clears memory to allocate a basic block, we do
574 not have to clear the newly allocated basic block here. */
577 bb
->index
= last_basic_block_for_fn (cfun
);
579 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
581 /* Add the new block to the linked list of blocks. */
582 link_block (bb
, after
);
584 /* Grow the basic block array if needed. */
585 if ((size_t) last_basic_block_for_fn (cfun
)
586 == basic_block_info_for_fn (cfun
)->length ())
589 (last_basic_block_for_fn (cfun
)
590 + (last_basic_block_for_fn (cfun
) + 3) / 4);
591 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
594 /* Add the newly created block to the array. */
595 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
597 n_basic_blocks_for_fn (cfun
)++;
598 last_basic_block_for_fn (cfun
)++;
604 /*---------------------------------------------------------------------------
606 ---------------------------------------------------------------------------*/
608 /* If basic block BB has an abnormal edge to a basic block
609 containing IFN_ABNORMAL_DISPATCHER internal call, return
610 that the dispatcher's basic block, otherwise return NULL. */
613 get_abnormal_succ_dispatcher (basic_block bb
)
618 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
619 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
621 gimple_stmt_iterator gsi
622 = gsi_start_nondebug_after_labels_bb (e
->dest
);
623 gimple
*g
= gsi_stmt (gsi
);
625 && is_gimple_call (g
)
626 && gimple_call_internal_p (g
)
627 && gimple_call_internal_fn (g
) == IFN_ABNORMAL_DISPATCHER
)
633 /* Helper function for make_edges. Create a basic block with
634 with ABNORMAL_DISPATCHER internal call in it if needed, and
635 create abnormal edges from BBS to it and from it to FOR_BB
636 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
639 handle_abnormal_edges (basic_block
*dispatcher_bbs
,
640 basic_block for_bb
, int *bb_to_omp_idx
,
641 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
643 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
644 unsigned int idx
= 0;
650 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
651 if (bb_to_omp_idx
[for_bb
->index
] != 0)
655 /* If the dispatcher has been created already, then there are basic
656 blocks with abnormal edges to it, so just make a new edge to
658 if (*dispatcher
== NULL
)
660 /* Check if there are any basic blocks that need to have
661 abnormal edges to this dispatcher. If there are none, return
663 if (bb_to_omp_idx
== NULL
)
665 if (bbs
->is_empty ())
670 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
671 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
677 /* Create the dispatcher bb. */
678 *dispatcher
= create_basic_block (NULL
, for_bb
);
681 /* Factor computed gotos into a common computed goto site. Also
682 record the location of that site so that we can un-factor the
683 gotos after we have converted back to normal form. */
684 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
686 /* Create the destination of the factored goto. Each original
687 computed goto will put its desired destination into this
688 variable and jump to the label we create immediately below. */
689 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
691 /* Build a label for the new block which will contain the
692 factored computed goto. */
693 tree factored_label_decl
694 = create_artificial_label (UNKNOWN_LOCATION
);
695 gimple
*factored_computed_goto_label
696 = gimple_build_label (factored_label_decl
);
697 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
699 /* Build our new computed goto. */
700 gimple
*factored_computed_goto
= gimple_build_goto (var
);
701 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
703 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
706 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
709 gsi
= gsi_last_bb (bb
);
710 gimple
*last
= gsi_stmt (gsi
);
712 gcc_assert (computed_goto_p (last
));
714 /* Copy the original computed goto's destination into VAR. */
716 = gimple_build_assign (var
, gimple_goto_dest (last
));
717 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
719 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
720 e
->goto_locus
= gimple_location (last
);
721 gsi_remove (&gsi
, true);
726 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
727 gimple
*g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
729 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
730 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
732 /* Create predecessor edges of the dispatcher. */
733 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
736 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
738 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
743 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
746 /* Creates outgoing edges for BB. Returns 1 when it ends with an
747 computed goto, returns 2 when it ends with a statement that
748 might return to this function via an nonlocal goto, otherwise
749 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
752 make_edges_bb (basic_block bb
, struct omp_region
**pcur_region
, int *pomp_index
)
754 gimple
*last
= last_stmt (bb
);
755 bool fallthru
= false;
761 switch (gimple_code (last
))
764 if (make_goto_expr_edges (bb
))
770 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
771 e
->goto_locus
= gimple_location (last
);
776 make_cond_expr_edges (bb
);
780 make_gimple_switch_edges (as_a
<gswitch
*> (last
), bb
);
784 make_eh_edges (last
);
787 case GIMPLE_EH_DISPATCH
:
788 fallthru
= make_eh_dispatch_edges (as_a
<geh_dispatch
*> (last
));
792 /* If this function receives a nonlocal goto, then we need to
793 make edges from this call site to all the nonlocal goto
795 if (stmt_can_make_abnormal_goto (last
))
798 /* If this statement has reachable exception handlers, then
799 create abnormal edges to them. */
800 make_eh_edges (last
);
802 /* BUILTIN_RETURN is really a return statement. */
803 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
805 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
808 /* Some calls are known not to return. */
810 fallthru
= !(gimple_call_flags (last
) & ECF_NORETURN
);
814 /* A GIMPLE_ASSIGN may throw internally and thus be considered
816 if (is_ctrl_altering_stmt (last
))
817 make_eh_edges (last
);
822 make_gimple_asm_edges (bb
);
827 fallthru
= make_gimple_omp_edges (bb
, pcur_region
, pomp_index
);
830 case GIMPLE_TRANSACTION
:
832 gtransaction
*txn
= as_a
<gtransaction
*> (last
);
833 tree label1
= gimple_transaction_label_norm (txn
);
834 tree label2
= gimple_transaction_label_uninst (txn
);
837 make_edge (bb
, label_to_block (label1
), EDGE_FALLTHRU
);
839 make_edge (bb
, label_to_block (label2
),
840 EDGE_TM_UNINSTRUMENTED
| (label1
? 0 : EDGE_FALLTHRU
));
842 tree label3
= gimple_transaction_label_over (txn
);
843 if (gimple_transaction_subcode (txn
)
844 & (GTMA_HAVE_ABORT
| GTMA_IS_OUTER
))
845 make_edge (bb
, label_to_block (label3
), EDGE_TM_ABORT
);
852 gcc_assert (!stmt_ends_bb_p (last
));
858 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
863 /* Join all the blocks in the flowgraph. */
869 struct omp_region
*cur_region
= NULL
;
870 auto_vec
<basic_block
> ab_edge_goto
;
871 auto_vec
<basic_block
> ab_edge_call
;
872 int *bb_to_omp_idx
= NULL
;
873 int cur_omp_region_idx
= 0;
875 /* Create an edge from entry to the first block with executable
877 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
878 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
881 /* Traverse the basic block array placing edges. */
882 FOR_EACH_BB_FN (bb
, cfun
)
887 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
889 mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
891 ab_edge_goto
.safe_push (bb
);
893 ab_edge_call
.safe_push (bb
);
895 if (cur_region
&& bb_to_omp_idx
== NULL
)
896 bb_to_omp_idx
= XCNEWVEC (int, n_basic_blocks_for_fn (cfun
));
899 /* Computed gotos are hell to deal with, especially if there are
900 lots of them with a large number of destinations. So we factor
901 them to a common computed goto location before we build the
902 edge list. After we convert back to normal form, we will un-factor
903 the computed gotos since factoring introduces an unwanted jump.
904 For non-local gotos and abnormal edges from calls to calls that return
905 twice or forced labels, factor the abnormal edges too, by having all
906 abnormal edges from the calls go to a common artificial basic block
907 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
908 basic block to all forced labels and calls returning twice.
909 We do this per-OpenMP structured block, because those regions
910 are guaranteed to be single entry single exit by the standard,
911 so it is not allowed to enter or exit such regions abnormally this way,
912 thus all computed gotos, non-local gotos and setjmp/longjmp calls
913 must not transfer control across SESE region boundaries. */
914 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
916 gimple_stmt_iterator gsi
;
917 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
918 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
919 int count
= n_basic_blocks_for_fn (cfun
);
922 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
924 FOR_EACH_BB_FN (bb
, cfun
)
926 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
928 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
934 target
= gimple_label_label (label_stmt
);
936 /* Make an edge to every label block that has been marked as a
937 potential target for a computed goto or a non-local goto. */
938 if (FORCED_LABEL (target
))
939 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
940 &ab_edge_goto
, true);
941 if (DECL_NONLOCAL (target
))
943 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
944 &ab_edge_call
, false);
949 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
950 gsi_next_nondebug (&gsi
);
951 if (!gsi_end_p (gsi
))
953 /* Make an edge to every setjmp-like call. */
954 gimple
*call_stmt
= gsi_stmt (gsi
);
955 if (is_gimple_call (call_stmt
)
956 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
957 || gimple_call_builtin_p (call_stmt
,
958 BUILT_IN_SETJMP_RECEIVER
)))
959 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
960 &ab_edge_call
, false);
965 XDELETE (dispatcher_bbs
);
968 XDELETE (bb_to_omp_idx
);
973 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
974 needed. Returns true if new bbs were created.
975 Note: This is transitional code, and should not be used for new code. We
976 should be able to get rid of this by rewriting all target va-arg
977 gimplification hooks to use an interface gimple_build_cond_value as described
978 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
981 gimple_find_sub_bbs (gimple_seq seq
, gimple_stmt_iterator
*gsi
)
983 gimple
*stmt
= gsi_stmt (*gsi
);
984 basic_block bb
= gimple_bb (stmt
);
985 basic_block lastbb
, afterbb
;
986 int old_num_bbs
= n_basic_blocks_for_fn (cfun
);
988 lastbb
= make_blocks_1 (seq
, bb
);
989 if (old_num_bbs
== n_basic_blocks_for_fn (cfun
))
991 e
= split_block (bb
, stmt
);
992 /* Move e->dest to come after the new basic blocks. */
994 unlink_block (afterbb
);
995 link_block (afterbb
, lastbb
);
996 redirect_edge_succ (e
, bb
->next_bb
);
998 while (bb
!= afterbb
)
1000 struct omp_region
*cur_region
= NULL
;
1001 int cur_omp_region_idx
= 0;
1002 int mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
1003 gcc_assert (!mer
&& !cur_region
);
1004 add_bb_to_loop (bb
, afterbb
->loop_father
);
1010 /* Find the next available discriminator value for LOCUS. The
1011 discriminator distinguishes among several basic blocks that
1012 share a common locus, allowing for more accurate sample-based
1016 next_discriminator_for_locus (location_t locus
)
1018 struct locus_discrim_map item
;
1019 struct locus_discrim_map
**slot
;
1022 item
.discriminator
= 0;
1023 slot
= discriminator_per_locus
->find_slot_with_hash (
1024 &item
, LOCATION_LINE (locus
), INSERT
);
1026 if (*slot
== HTAB_EMPTY_ENTRY
)
1028 *slot
= XNEW (struct locus_discrim_map
);
1030 (*slot
)->locus
= locus
;
1031 (*slot
)->discriminator
= 0;
1033 (*slot
)->discriminator
++;
1034 return (*slot
)->discriminator
;
1037 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1040 same_line_p (location_t locus1
, location_t locus2
)
1042 expanded_location from
, to
;
1044 if (locus1
== locus2
)
1047 from
= expand_location (locus1
);
1048 to
= expand_location (locus2
);
1050 if (from
.line
!= to
.line
)
1052 if (from
.file
== to
.file
)
1054 return (from
.file
!= NULL
1056 && filename_cmp (from
.file
, to
.file
) == 0);
1059 /* Assign discriminators to each basic block. */
1062 assign_discriminators (void)
1066 FOR_EACH_BB_FN (bb
, cfun
)
1070 gimple
*last
= last_stmt (bb
);
1071 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
1073 if (locus
== UNKNOWN_LOCATION
)
1076 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1078 gimple
*first
= first_non_label_stmt (e
->dest
);
1079 gimple
*last
= last_stmt (e
->dest
);
1080 if ((first
&& same_line_p (locus
, gimple_location (first
)))
1081 || (last
&& same_line_p (locus
, gimple_location (last
))))
1083 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
1084 bb
->discriminator
= next_discriminator_for_locus (locus
);
1086 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
1092 /* Create the edges for a GIMPLE_COND starting at block BB. */
1095 make_cond_expr_edges (basic_block bb
)
1097 gcond
*entry
= as_a
<gcond
*> (last_stmt (bb
));
1098 gimple
*then_stmt
, *else_stmt
;
1099 basic_block then_bb
, else_bb
;
1100 tree then_label
, else_label
;
1104 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
1106 /* Entry basic blocks for each component. */
1107 then_label
= gimple_cond_true_label (entry
);
1108 else_label
= gimple_cond_false_label (entry
);
1109 then_bb
= label_to_block (then_label
);
1110 else_bb
= label_to_block (else_label
);
1111 then_stmt
= first_stmt (then_bb
);
1112 else_stmt
= first_stmt (else_bb
);
1114 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1115 e
->goto_locus
= gimple_location (then_stmt
);
1116 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1118 e
->goto_locus
= gimple_location (else_stmt
);
1120 /* We do not need the labels anymore. */
1121 gimple_cond_set_true_label (entry
, NULL_TREE
);
1122 gimple_cond_set_false_label (entry
, NULL_TREE
);
1126 /* Called for each element in the hash table (P) as we delete the
1127 edge to cases hash table.
1129 Clear all the CASE_CHAINs to prevent problems with copying of
1130 SWITCH_EXPRs and structure sharing rules, then free the hash table
1134 edge_to_cases_cleanup (edge
const &, tree
const &value
, void *)
1138 for (t
= value
; t
; t
= next
)
1140 next
= CASE_CHAIN (t
);
1141 CASE_CHAIN (t
) = NULL
;
1147 /* Start recording information mapping edges to case labels. */
1150 start_recording_case_labels (void)
1152 gcc_assert (edge_to_cases
== NULL
);
1153 edge_to_cases
= new hash_map
<edge
, tree
>;
1154 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
1157 /* Return nonzero if we are recording information for case labels. */
1160 recording_case_labels_p (void)
1162 return (edge_to_cases
!= NULL
);
1165 /* Stop recording information mapping edges to case labels and
1166 remove any information we have recorded. */
1168 end_recording_case_labels (void)
1172 edge_to_cases
->traverse
<void *, edge_to_cases_cleanup
> (NULL
);
1173 delete edge_to_cases
;
1174 edge_to_cases
= NULL
;
1175 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1177 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
1180 gimple
*stmt
= last_stmt (bb
);
1181 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1182 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1185 BITMAP_FREE (touched_switch_bbs
);
1188 /* If we are inside a {start,end}_recording_cases block, then return
1189 a chain of CASE_LABEL_EXPRs from T which reference E.
1191 Otherwise return NULL. */
1194 get_cases_for_edge (edge e
, gswitch
*t
)
1199 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1200 chains available. Return NULL so the caller can detect this case. */
1201 if (!recording_case_labels_p ())
1204 slot
= edge_to_cases
->get (e
);
1208 /* If we did not find E in the hash table, then this must be the first
1209 time we have been queried for information about E & T. Add all the
1210 elements from T to the hash table then perform the query again. */
1212 n
= gimple_switch_num_labels (t
);
1213 for (i
= 0; i
< n
; i
++)
1215 tree elt
= gimple_switch_label (t
, i
);
1216 tree lab
= CASE_LABEL (elt
);
1217 basic_block label_bb
= label_to_block (lab
);
1218 edge this_edge
= find_edge (e
->src
, label_bb
);
1220 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1222 tree
&s
= edge_to_cases
->get_or_insert (this_edge
);
1223 CASE_CHAIN (elt
) = s
;
1227 return *edge_to_cases
->get (e
);
1230 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1233 make_gimple_switch_edges (gswitch
*entry
, basic_block bb
)
1237 n
= gimple_switch_num_labels (entry
);
1239 for (i
= 0; i
< n
; ++i
)
1241 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1242 basic_block label_bb
= label_to_block (lab
);
1243 make_edge (bb
, label_bb
, 0);
1248 /* Return the basic block holding label DEST. */
1251 label_to_block_fn (struct function
*ifun
, tree dest
)
1253 int uid
= LABEL_DECL_UID (dest
);
1255 /* We would die hard when faced by an undefined label. Emit a label to
1256 the very first basic block. This will hopefully make even the dataflow
1257 and undefined variable warnings quite right. */
1258 if (seen_error () && uid
< 0)
1260 gimple_stmt_iterator gsi
=
1261 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1264 stmt
= gimple_build_label (dest
);
1265 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1266 uid
= LABEL_DECL_UID (dest
);
1268 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1270 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1273 /* Create edges for a goto statement at block BB. Returns true
1274 if abnormal edges should be created. */
1277 make_goto_expr_edges (basic_block bb
)
1279 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1280 gimple
*goto_t
= gsi_stmt (last
);
1282 /* A simple GOTO creates normal edges. */
1283 if (simple_goto_p (goto_t
))
1285 tree dest
= gimple_goto_dest (goto_t
);
1286 basic_block label_bb
= label_to_block (dest
);
1287 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1288 e
->goto_locus
= gimple_location (goto_t
);
1289 gsi_remove (&last
, true);
1293 /* A computed GOTO creates abnormal edges. */
1297 /* Create edges for an asm statement with labels at block BB. */
1300 make_gimple_asm_edges (basic_block bb
)
1302 gasm
*stmt
= as_a
<gasm
*> (last_stmt (bb
));
1303 int i
, n
= gimple_asm_nlabels (stmt
);
1305 for (i
= 0; i
< n
; ++i
)
1307 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1308 basic_block label_bb
= label_to_block (label
);
1309 make_edge (bb
, label_bb
, 0);
1313 /*---------------------------------------------------------------------------
1315 ---------------------------------------------------------------------------*/
1317 /* Cleanup useless labels in basic blocks. This is something we wish
1318 to do early because it allows us to group case labels before creating
1319 the edges for the CFG, and it speeds up block statement iterators in
1320 all passes later on.
1321 We rerun this pass after CFG is created, to get rid of the labels that
1322 are no longer referenced. After then we do not run it any more, since
1323 (almost) no new labels should be created. */
1325 /* A map from basic block index to the leading label of that block. */
1326 static struct label_record
1331 /* True if the label is referenced from somewhere. */
1335 /* Given LABEL return the first label in the same basic block. */
1338 main_block_label (tree label
)
1340 basic_block bb
= label_to_block (label
);
1341 tree main_label
= label_for_bb
[bb
->index
].label
;
1343 /* label_to_block possibly inserted undefined label into the chain. */
1346 label_for_bb
[bb
->index
].label
= label
;
1350 label_for_bb
[bb
->index
].used
= true;
1354 /* Clean up redundant labels within the exception tree. */
1357 cleanup_dead_labels_eh (void)
1364 if (cfun
->eh
== NULL
)
1367 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1368 if (lp
&& lp
->post_landing_pad
)
1370 lab
= main_block_label (lp
->post_landing_pad
);
1371 if (lab
!= lp
->post_landing_pad
)
1373 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1374 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1378 FOR_ALL_EH_REGION (r
)
1382 case ERT_MUST_NOT_THROW
:
1388 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1392 c
->label
= main_block_label (lab
);
1397 case ERT_ALLOWED_EXCEPTIONS
:
1398 lab
= r
->u
.allowed
.label
;
1400 r
->u
.allowed
.label
= main_block_label (lab
);
1406 /* Cleanup redundant labels. This is a three-step process:
1407 1) Find the leading label for each block.
1408 2) Redirect all references to labels to the leading labels.
1409 3) Cleanup all useless labels. */
1412 cleanup_dead_labels (void)
1415 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1417 /* Find a suitable label for each block. We use the first user-defined
1418 label if there is one, or otherwise just the first label we see. */
1419 FOR_EACH_BB_FN (bb
, cfun
)
1421 gimple_stmt_iterator i
;
1423 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1426 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1431 label
= gimple_label_label (label_stmt
);
1433 /* If we have not yet seen a label for the current block,
1434 remember this one and see if there are more labels. */
1435 if (!label_for_bb
[bb
->index
].label
)
1437 label_for_bb
[bb
->index
].label
= label
;
1441 /* If we did see a label for the current block already, but it
1442 is an artificially created label, replace it if the current
1443 label is a user defined label. */
1444 if (!DECL_ARTIFICIAL (label
)
1445 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1447 label_for_bb
[bb
->index
].label
= label
;
1453 /* Now redirect all jumps/branches to the selected label.
1454 First do so for each block ending in a control statement. */
1455 FOR_EACH_BB_FN (bb
, cfun
)
1457 gimple
*stmt
= last_stmt (bb
);
1458 tree label
, new_label
;
1463 switch (gimple_code (stmt
))
1467 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
1468 label
= gimple_cond_true_label (cond_stmt
);
1471 new_label
= main_block_label (label
);
1472 if (new_label
!= label
)
1473 gimple_cond_set_true_label (cond_stmt
, new_label
);
1476 label
= gimple_cond_false_label (cond_stmt
);
1479 new_label
= main_block_label (label
);
1480 if (new_label
!= label
)
1481 gimple_cond_set_false_label (cond_stmt
, new_label
);
1488 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
1489 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
1491 /* Replace all destination labels. */
1492 for (i
= 0; i
< n
; ++i
)
1494 tree case_label
= gimple_switch_label (switch_stmt
, i
);
1495 label
= CASE_LABEL (case_label
);
1496 new_label
= main_block_label (label
);
1497 if (new_label
!= label
)
1498 CASE_LABEL (case_label
) = new_label
;
1505 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1506 int i
, n
= gimple_asm_nlabels (asm_stmt
);
1508 for (i
= 0; i
< n
; ++i
)
1510 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
1511 tree label
= main_block_label (TREE_VALUE (cons
));
1512 TREE_VALUE (cons
) = label
;
1517 /* We have to handle gotos until they're removed, and we don't
1518 remove them until after we've created the CFG edges. */
1520 if (!computed_goto_p (stmt
))
1522 ggoto
*goto_stmt
= as_a
<ggoto
*> (stmt
);
1523 label
= gimple_goto_dest (goto_stmt
);
1524 new_label
= main_block_label (label
);
1525 if (new_label
!= label
)
1526 gimple_goto_set_dest (goto_stmt
, new_label
);
1530 case GIMPLE_TRANSACTION
:
1532 gtransaction
*txn
= as_a
<gtransaction
*> (stmt
);
1534 label
= gimple_transaction_label_norm (txn
);
1537 new_label
= main_block_label (label
);
1538 if (new_label
!= label
)
1539 gimple_transaction_set_label_norm (txn
, new_label
);
1542 label
= gimple_transaction_label_uninst (txn
);
1545 new_label
= main_block_label (label
);
1546 if (new_label
!= label
)
1547 gimple_transaction_set_label_uninst (txn
, new_label
);
1550 label
= gimple_transaction_label_over (txn
);
1553 new_label
= main_block_label (label
);
1554 if (new_label
!= label
)
1555 gimple_transaction_set_label_over (txn
, new_label
);
1565 /* Do the same for the exception region tree labels. */
1566 cleanup_dead_labels_eh ();
1568 /* Finally, purge dead labels. All user-defined labels and labels that
1569 can be the target of non-local gotos and labels which have their
1570 address taken are preserved. */
1571 FOR_EACH_BB_FN (bb
, cfun
)
1573 gimple_stmt_iterator i
;
1574 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1576 if (!label_for_this_bb
)
1579 /* If the main label of the block is unused, we may still remove it. */
1580 if (!label_for_bb
[bb
->index
].used
)
1581 label_for_this_bb
= NULL
;
1583 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1586 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1591 label
= gimple_label_label (label_stmt
);
1593 if (label
== label_for_this_bb
1594 || !DECL_ARTIFICIAL (label
)
1595 || DECL_NONLOCAL (label
)
1596 || FORCED_LABEL (label
))
1599 gsi_remove (&i
, true);
1603 free (label_for_bb
);
1606 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1607 the ones jumping to the same label.
1608 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1611 group_case_labels_stmt (gswitch
*stmt
)
1613 int old_size
= gimple_switch_num_labels (stmt
);
1614 int i
, j
, new_size
= old_size
;
1615 basic_block default_bb
= NULL
;
1617 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1619 /* Look for possible opportunities to merge cases. */
1621 while (i
< old_size
)
1623 tree base_case
, base_high
;
1624 basic_block base_bb
;
1626 base_case
= gimple_switch_label (stmt
, i
);
1628 gcc_assert (base_case
);
1629 base_bb
= label_to_block (CASE_LABEL (base_case
));
1631 /* Discard cases that have the same destination as the
1633 if (base_bb
== default_bb
)
1635 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1641 base_high
= CASE_HIGH (base_case
)
1642 ? CASE_HIGH (base_case
)
1643 : CASE_LOW (base_case
);
1646 /* Try to merge case labels. Break out when we reach the end
1647 of the label vector or when we cannot merge the next case
1648 label with the current one. */
1649 while (i
< old_size
)
1651 tree merge_case
= gimple_switch_label (stmt
, i
);
1652 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1653 wide_int bhp1
= wi::add (base_high
, 1);
1655 /* Merge the cases if they jump to the same place,
1656 and their ranges are consecutive. */
1657 if (merge_bb
== base_bb
1658 && wi::eq_p (CASE_LOW (merge_case
), bhp1
))
1660 base_high
= CASE_HIGH (merge_case
) ?
1661 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1662 CASE_HIGH (base_case
) = base_high
;
1663 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1672 /* Compress the case labels in the label vector, and adjust the
1673 length of the vector. */
1674 for (i
= 0, j
= 0; i
< new_size
; i
++)
1676 while (! gimple_switch_label (stmt
, j
))
1678 gimple_switch_set_label (stmt
, i
,
1679 gimple_switch_label (stmt
, j
++));
1682 gcc_assert (new_size
<= old_size
);
1683 gimple_switch_set_num_labels (stmt
, new_size
);
1686 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1687 and scan the sorted vector of cases. Combine the ones jumping to the
1691 group_case_labels (void)
1695 FOR_EACH_BB_FN (bb
, cfun
)
1697 gimple
*stmt
= last_stmt (bb
);
1698 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1699 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1703 /* Checks whether we can merge block B into block A. */
1706 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1710 if (!single_succ_p (a
))
1713 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1716 if (single_succ (a
) != b
)
1719 if (!single_pred_p (b
))
1722 if (a
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
1723 || b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1726 /* If A ends by a statement causing exceptions or something similar, we
1727 cannot merge the blocks. */
1728 stmt
= last_stmt (a
);
1729 if (stmt
&& stmt_ends_bb_p (stmt
))
1732 /* Do not allow a block with only a non-local label to be merged. */
1734 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1735 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
1738 /* Examine the labels at the beginning of B. */
1739 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);
1743 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1746 lab
= gimple_label_label (label_stmt
);
1748 /* Do not remove user forced labels or for -O0 any user labels. */
1749 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1753 /* Protect simple loop latches. We only want to avoid merging
1754 the latch with the loop header or with a block in another
1755 loop in this case. */
1757 && b
->loop_father
->latch
== b
1758 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES
)
1759 && (b
->loop_father
->header
== a
1760 || b
->loop_father
!= a
->loop_father
))
1763 /* It must be possible to eliminate all phi nodes in B. If ssa form
1764 is not up-to-date and a name-mapping is registered, we cannot eliminate
1765 any phis. Symbols marked for renaming are never a problem though. */
1766 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);
1769 gphi
*phi
= gsi
.phi ();
1770 /* Technically only new names matter. */
1771 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1775 /* When not optimizing, don't merge if we'd lose goto_locus. */
1777 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1779 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1780 gimple_stmt_iterator prev
, next
;
1781 prev
= gsi_last_nondebug_bb (a
);
1782 next
= gsi_after_labels (b
);
1783 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1784 gsi_next_nondebug (&next
);
1785 if ((gsi_end_p (prev
)
1786 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1787 && (gsi_end_p (next
)
1788 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1795 /* Replaces all uses of NAME by VAL. */
1798 replace_uses_by (tree name
, tree val
)
1800 imm_use_iterator imm_iter
;
1805 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1807 /* Mark the block if we change the last stmt in it. */
1808 if (cfgcleanup_altered_bbs
1809 && stmt_ends_bb_p (stmt
))
1810 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1812 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1814 replace_exp (use
, val
);
1816 if (gimple_code (stmt
) == GIMPLE_PHI
)
1818 e
= gimple_phi_arg_edge (as_a
<gphi
*> (stmt
),
1819 PHI_ARG_INDEX_FROM_USE (use
));
1820 if (e
->flags
& EDGE_ABNORMAL
1821 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
))
1823 /* This can only occur for virtual operands, since
1824 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1825 would prevent replacement. */
1826 gcc_checking_assert (virtual_operand_p (name
));
1827 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1832 if (gimple_code (stmt
) != GIMPLE_PHI
)
1834 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1835 gimple
*orig_stmt
= stmt
;
1838 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1839 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1840 only change sth from non-invariant to invariant, and only
1841 when propagating constants. */
1842 if (is_gimple_min_invariant (val
))
1843 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1845 tree op
= gimple_op (stmt
, i
);
1846 /* Operands may be empty here. For example, the labels
1847 of a GIMPLE_COND are nulled out following the creation
1848 of the corresponding CFG edges. */
1849 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1850 recompute_tree_invariant_for_addr_expr (op
);
1853 if (fold_stmt (&gsi
))
1854 stmt
= gsi_stmt (gsi
);
1856 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1857 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1863 gcc_checking_assert (has_zero_uses (name
));
1865 /* Also update the trees stored in loop structures. */
1870 FOR_EACH_LOOP (loop
, 0)
1872 substitute_in_loop_info (loop
, name
, val
);
1877 /* Merge block B into block A. */
1880 gimple_merge_blocks (basic_block a
, basic_block b
)
1882 gimple_stmt_iterator last
, gsi
;
1886 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1888 /* Remove all single-valued PHI nodes from block B of the form
1889 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1890 gsi
= gsi_last_bb (a
);
1891 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1893 gimple
*phi
= gsi_stmt (psi
);
1894 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1896 bool may_replace_uses
= (virtual_operand_p (def
)
1897 || may_propagate_copy (def
, use
));
1899 /* In case we maintain loop closed ssa form, do not propagate arguments
1900 of loop exit phi nodes. */
1902 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1903 && !virtual_operand_p (def
)
1904 && TREE_CODE (use
) == SSA_NAME
1905 && a
->loop_father
!= b
->loop_father
)
1906 may_replace_uses
= false;
1908 if (!may_replace_uses
)
1910 gcc_assert (!virtual_operand_p (def
));
1912 /* Note that just emitting the copies is fine -- there is no problem
1913 with ordering of phi nodes. This is because A is the single
1914 predecessor of B, therefore results of the phi nodes cannot
1915 appear as arguments of the phi nodes. */
1916 copy
= gimple_build_assign (def
, use
);
1917 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1918 remove_phi_node (&psi
, false);
1922 /* If we deal with a PHI for virtual operands, we can simply
1923 propagate these without fussing with folding or updating
1925 if (virtual_operand_p (def
))
1927 imm_use_iterator iter
;
1928 use_operand_p use_p
;
1931 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1932 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1933 SET_USE (use_p
, use
);
1935 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1936 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1939 replace_uses_by (def
, use
);
1941 remove_phi_node (&psi
, true);
1945 /* Ensure that B follows A. */
1946 move_block_after (b
, a
);
1948 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1949 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1951 /* Remove labels from B and set gimple_bb to A for other statements. */
1952 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
1954 gimple
*stmt
= gsi_stmt (gsi
);
1955 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1957 tree label
= gimple_label_label (label_stmt
);
1960 gsi_remove (&gsi
, false);
1962 /* Now that we can thread computed gotos, we might have
1963 a situation where we have a forced label in block B
1964 However, the label at the start of block B might still be
1965 used in other ways (think about the runtime checking for
1966 Fortran assigned gotos). So we can not just delete the
1967 label. Instead we move the label to the start of block A. */
1968 if (FORCED_LABEL (label
))
1970 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
1971 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
1973 /* Other user labels keep around in a form of a debug stmt. */
1974 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
1976 gimple
*dbg
= gimple_build_debug_bind (label
,
1979 gimple_debug_bind_reset_value (dbg
);
1980 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
1983 lp_nr
= EH_LANDING_PAD_NR (label
);
1986 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
1987 lp
->post_landing_pad
= NULL
;
1992 gimple_set_bb (stmt
, a
);
1997 /* When merging two BBs, if their counts are different, the larger count
1998 is selected as the new bb count. This is to handle inconsistent
2000 if (a
->loop_father
== b
->loop_father
)
2002 a
->count
= MAX (a
->count
, b
->count
);
2003 a
->frequency
= MAX (a
->frequency
, b
->frequency
);
2006 /* Merge the sequences. */
2007 last
= gsi_last_bb (a
);
2008 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
2009 set_bb_seq (b
, NULL
);
2011 if (cfgcleanup_altered_bbs
)
2012 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
2016 /* Return the one of two successors of BB that is not reachable by a
2017 complex edge, if there is one. Else, return BB. We use
2018 this in optimizations that use post-dominators for their heuristics,
2019 to catch the cases in C++ where function calls are involved. */
2022 single_noncomplex_succ (basic_block bb
)
2025 if (EDGE_COUNT (bb
->succs
) != 2)
2028 e0
= EDGE_SUCC (bb
, 0);
2029 e1
= EDGE_SUCC (bb
, 1);
2030 if (e0
->flags
& EDGE_COMPLEX
)
2032 if (e1
->flags
& EDGE_COMPLEX
)
2038 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2041 notice_special_calls (gcall
*call
)
2043 int flags
= gimple_call_flags (call
);
2045 if (flags
& ECF_MAY_BE_ALLOCA
)
2046 cfun
->calls_alloca
= true;
2047 if (flags
& ECF_RETURNS_TWICE
)
2048 cfun
->calls_setjmp
= true;
2052 /* Clear flags set by notice_special_calls. Used by dead code removal
2053 to update the flags. */
2056 clear_special_calls (void)
2058 cfun
->calls_alloca
= false;
2059 cfun
->calls_setjmp
= false;
2062 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2065 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
2067 /* Since this block is no longer reachable, we can just delete all
2068 of its PHI nodes. */
2069 remove_phi_nodes (bb
);
2071 /* Remove edges to BB's successors. */
2072 while (EDGE_COUNT (bb
->succs
) > 0)
2073 remove_edge (EDGE_SUCC (bb
, 0));
2077 /* Remove statements of basic block BB. */
2080 remove_bb (basic_block bb
)
2082 gimple_stmt_iterator i
;
2086 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
2087 if (dump_flags
& TDF_DETAILS
)
2089 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
2090 fprintf (dump_file
, "\n");
2096 struct loop
*loop
= bb
->loop_father
;
2098 /* If a loop gets removed, clean up the information associated
2100 if (loop
->latch
== bb
2101 || loop
->header
== bb
)
2102 free_numbers_of_iterations_estimates_loop (loop
);
2105 /* Remove all the instructions in the block. */
2106 if (bb_seq (bb
) != NULL
)
2108 /* Walk backwards so as to get a chance to substitute all
2109 released DEFs into debug stmts. See
2110 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2112 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2114 gimple
*stmt
= gsi_stmt (i
);
2115 glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
);
2117 && (FORCED_LABEL (gimple_label_label (label_stmt
))
2118 || DECL_NONLOCAL (gimple_label_label (label_stmt
))))
2121 gimple_stmt_iterator new_gsi
;
2123 /* A non-reachable non-local label may still be referenced.
2124 But it no longer needs to carry the extra semantics of
2126 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
2128 DECL_NONLOCAL (gimple_label_label (label_stmt
)) = 0;
2129 FORCED_LABEL (gimple_label_label (label_stmt
)) = 1;
2132 new_bb
= bb
->prev_bb
;
2133 new_gsi
= gsi_start_bb (new_bb
);
2134 gsi_remove (&i
, false);
2135 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2139 /* Release SSA definitions. */
2140 release_defs (stmt
);
2141 gsi_remove (&i
, true);
2145 i
= gsi_last_bb (bb
);
2151 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2152 bb
->il
.gimple
.seq
= NULL
;
2153 bb
->il
.gimple
.phi_nodes
= NULL
;
2157 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2158 predicate VAL, return the edge that will be taken out of the block.
2159 If VAL does not match a unique edge, NULL is returned. */
2162 find_taken_edge (basic_block bb
, tree val
)
2166 stmt
= last_stmt (bb
);
2169 gcc_assert (is_ctrl_stmt (stmt
));
2174 if (!is_gimple_min_invariant (val
))
2177 if (gimple_code (stmt
) == GIMPLE_COND
)
2178 return find_taken_edge_cond_expr (bb
, val
);
2180 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2181 return find_taken_edge_switch_expr (as_a
<gswitch
*> (stmt
), bb
, val
);
2183 if (computed_goto_p (stmt
))
2185 /* Only optimize if the argument is a label, if the argument is
2186 not a label then we can not construct a proper CFG.
2188 It may be the case that we only need to allow the LABEL_REF to
2189 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2190 appear inside a LABEL_EXPR just to be safe. */
2191 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2192 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2193 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2200 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2201 statement, determine which of the outgoing edges will be taken out of the
2202 block. Return NULL if either edge may be taken. */
2205 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2210 dest
= label_to_block (val
);
2213 e
= find_edge (bb
, dest
);
2214 gcc_assert (e
!= NULL
);
2220 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2221 statement, determine which of the two edges will be taken out of the
2222 block. Return NULL if either edge may be taken. */
2225 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2227 edge true_edge
, false_edge
;
2229 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2231 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2232 return (integer_zerop (val
) ? false_edge
: true_edge
);
2235 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2236 statement, determine which edge will be taken out of the block. Return
2237 NULL if any edge may be taken. */
2240 find_taken_edge_switch_expr (gswitch
*switch_stmt
, basic_block bb
,
2243 basic_block dest_bb
;
2247 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2248 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2250 e
= find_edge (bb
, dest_bb
);
2256 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2257 We can make optimal use here of the fact that the case labels are
2258 sorted: We can do a binary search for a case matching VAL. */
2261 find_case_label_for_value (gswitch
*switch_stmt
, tree val
)
2263 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2264 tree default_case
= gimple_switch_default_label (switch_stmt
);
2266 for (low
= 0, high
= n
; high
- low
> 1; )
2268 size_t i
= (high
+ low
) / 2;
2269 tree t
= gimple_switch_label (switch_stmt
, i
);
2272 /* Cache the result of comparing CASE_LOW and val. */
2273 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2280 if (CASE_HIGH (t
) == NULL
)
2282 /* A singe-valued case label. */
2288 /* A case range. We can only handle integer ranges. */
2289 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2294 return default_case
;
2298 /* Dump a basic block on stderr. */
2301 gimple_debug_bb (basic_block bb
)
2303 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2307 /* Dump basic block with index N on stderr. */
2310 gimple_debug_bb_n (int n
)
2312 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2313 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2317 /* Dump the CFG on stderr.
2319 FLAGS are the same used by the tree dumping functions
2320 (see TDF_* in dumpfile.h). */
2323 gimple_debug_cfg (int flags
)
2325 gimple_dump_cfg (stderr
, flags
);
2329 /* Dump the program showing basic block boundaries on the given FILE.
2331 FLAGS are the same used by the tree dumping functions (see TDF_* in
2335 gimple_dump_cfg (FILE *file
, int flags
)
2337 if (flags
& TDF_DETAILS
)
2339 dump_function_header (file
, current_function_decl
, flags
);
2340 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2341 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2342 last_basic_block_for_fn (cfun
));
2344 brief_dump_cfg (file
, flags
| TDF_COMMENT
);
2345 fprintf (file
, "\n");
2348 if (flags
& TDF_STATS
)
2349 dump_cfg_stats (file
);
2351 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2355 /* Dump CFG statistics on FILE. */
2358 dump_cfg_stats (FILE *file
)
2360 static long max_num_merged_labels
= 0;
2361 unsigned long size
, total
= 0;
2364 const char * const fmt_str
= "%-30s%-13s%12s\n";
2365 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2366 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2367 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2368 const char *funcname
= current_function_name ();
2370 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2372 fprintf (file
, "---------------------------------------------------------\n");
2373 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2374 fprintf (file
, fmt_str
, "", " instances ", "used ");
2375 fprintf (file
, "---------------------------------------------------------\n");
2377 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2379 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2380 SCALE (size
), LABEL (size
));
2383 FOR_EACH_BB_FN (bb
, cfun
)
2384 num_edges
+= EDGE_COUNT (bb
->succs
);
2385 size
= num_edges
* sizeof (struct edge_def
);
2387 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2389 fprintf (file
, "---------------------------------------------------------\n");
2390 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2392 fprintf (file
, "---------------------------------------------------------\n");
2393 fprintf (file
, "\n");
2395 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2396 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2398 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2399 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2401 fprintf (file
, "\n");
2405 /* Dump CFG statistics on stderr. Keep extern so that it's always
2406 linked in the final executable. */
2409 debug_cfg_stats (void)
2411 dump_cfg_stats (stderr
);
2414 /*---------------------------------------------------------------------------
2415 Miscellaneous helpers
2416 ---------------------------------------------------------------------------*/
2418 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2419 flow. Transfers of control flow associated with EH are excluded. */
2422 call_can_make_abnormal_goto (gimple
*t
)
2424 /* If the function has no non-local labels, then a call cannot make an
2425 abnormal transfer of control. */
2426 if (!cfun
->has_nonlocal_label
2427 && !cfun
->calls_setjmp
)
2430 /* Likewise if the call has no side effects. */
2431 if (!gimple_has_side_effects (t
))
2434 /* Likewise if the called function is leaf. */
2435 if (gimple_call_flags (t
) & ECF_LEAF
)
2442 /* Return true if T can make an abnormal transfer of control flow.
2443 Transfers of control flow associated with EH are excluded. */
2446 stmt_can_make_abnormal_goto (gimple
*t
)
2448 if (computed_goto_p (t
))
2450 if (is_gimple_call (t
))
2451 return call_can_make_abnormal_goto (t
);
2456 /* Return true if T represents a stmt that always transfers control. */
2459 is_ctrl_stmt (gimple
*t
)
2461 switch (gimple_code (t
))
2475 /* Return true if T is a statement that may alter the flow of control
2476 (e.g., a call to a non-returning function). */
2479 is_ctrl_altering_stmt (gimple
*t
)
2483 switch (gimple_code (t
))
2486 /* Per stmt call flag indicates whether the call could alter
2488 if (gimple_call_ctrl_altering_p (t
))
2492 case GIMPLE_EH_DISPATCH
:
2493 /* EH_DISPATCH branches to the individual catch handlers at
2494 this level of a try or allowed-exceptions region. It can
2495 fallthru to the next statement as well. */
2499 if (gimple_asm_nlabels (as_a
<gasm
*> (t
)) > 0)
2504 /* OpenMP directives alter control flow. */
2507 case GIMPLE_TRANSACTION
:
2508 /* A transaction start alters control flow. */
2515 /* If a statement can throw, it alters control flow. */
2516 return stmt_can_throw_internal (t
);
2520 /* Return true if T is a simple local goto. */
2523 simple_goto_p (gimple
*t
)
2525 return (gimple_code (t
) == GIMPLE_GOTO
2526 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2530 /* Return true if STMT should start a new basic block. PREV_STMT is
2531 the statement preceding STMT. It is used when STMT is a label or a
2532 case label. Labels should only start a new basic block if their
2533 previous statement wasn't a label. Otherwise, sequence of labels
2534 would generate unnecessary basic blocks that only contain a single
2538 stmt_starts_bb_p (gimple
*stmt
, gimple
*prev_stmt
)
2543 /* Labels start a new basic block only if the preceding statement
2544 wasn't a label of the same type. This prevents the creation of
2545 consecutive blocks that have nothing but a single label. */
2546 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2548 /* Nonlocal and computed GOTO targets always start a new block. */
2549 if (DECL_NONLOCAL (gimple_label_label (label_stmt
))
2550 || FORCED_LABEL (gimple_label_label (label_stmt
)))
2553 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2555 if (DECL_NONLOCAL (gimple_label_label (
2556 as_a
<glabel
*> (prev_stmt
))))
2559 cfg_stats
.num_merged_labels
++;
2565 else if (gimple_code (stmt
) == GIMPLE_CALL
2566 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2567 /* setjmp acts similar to a nonlocal GOTO target and thus should
2568 start a new block. */
2575 /* Return true if T should end a basic block. */
2578 stmt_ends_bb_p (gimple
*t
)
2580 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2583 /* Remove block annotations and other data structures. */
2586 delete_tree_cfg_annotations (struct function
*fn
)
2588 vec_free (label_to_block_map_for_fn (fn
));
2591 /* Return the virtual phi in BB. */
2594 get_virtual_phi (basic_block bb
)
2596 for (gphi_iterator gsi
= gsi_start_phis (bb
);
2600 gphi
*phi
= gsi
.phi ();
2602 if (virtual_operand_p (PHI_RESULT (phi
)))
2609 /* Return the first statement in basic block BB. */
2612 first_stmt (basic_block bb
)
2614 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2615 gimple
*stmt
= NULL
;
2617 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2625 /* Return the first non-label statement in basic block BB. */
2628 first_non_label_stmt (basic_block bb
)
2630 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2631 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2633 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2636 /* Return the last statement in basic block BB. */
2639 last_stmt (basic_block bb
)
2641 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2642 gimple
*stmt
= NULL
;
2644 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2652 /* Return the last statement of an otherwise empty block. Return NULL
2653 if the block is totally empty, or if it contains more than one
2657 last_and_only_stmt (basic_block bb
)
2659 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2660 gimple
*last
, *prev
;
2665 last
= gsi_stmt (i
);
2666 gsi_prev_nondebug (&i
);
2670 /* Empty statements should no longer appear in the instruction stream.
2671 Everything that might have appeared before should be deleted by
2672 remove_useless_stmts, and the optimizers should just gsi_remove
2673 instead of smashing with build_empty_stmt.
2675 Thus the only thing that should appear here in a block containing
2676 one executable statement is a label. */
2677 prev
= gsi_stmt (i
);
2678 if (gimple_code (prev
) == GIMPLE_LABEL
)
2684 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2687 reinstall_phi_args (edge new_edge
, edge old_edge
)
2693 vec
<edge_var_map
> *v
= redirect_edge_var_map_vector (old_edge
);
2697 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2698 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2699 i
++, gsi_next (&phis
))
2701 gphi
*phi
= phis
.phi ();
2702 tree result
= redirect_edge_var_map_result (vm
);
2703 tree arg
= redirect_edge_var_map_def (vm
);
2705 gcc_assert (result
== gimple_phi_result (phi
));
2707 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2710 redirect_edge_var_map_clear (old_edge
);
2713 /* Returns the basic block after which the new basic block created
2714 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2715 near its "logical" location. This is of most help to humans looking
2716 at debugging dumps. */
2719 split_edge_bb_loc (edge edge_in
)
2721 basic_block dest
= edge_in
->dest
;
2722 basic_block dest_prev
= dest
->prev_bb
;
2726 edge e
= find_edge (dest_prev
, dest
);
2727 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2728 return edge_in
->src
;
2733 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2734 Abort on abnormal edges. */
2737 gimple_split_edge (edge edge_in
)
2739 basic_block new_bb
, after_bb
, dest
;
2742 /* Abnormal edges cannot be split. */
2743 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2745 dest
= edge_in
->dest
;
2747 after_bb
= split_edge_bb_loc (edge_in
);
2749 new_bb
= create_empty_bb (after_bb
);
2750 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2751 new_bb
->count
= edge_in
->count
;
2752 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2753 new_edge
->probability
= REG_BR_PROB_BASE
;
2754 new_edge
->count
= edge_in
->count
;
2756 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2757 gcc_assert (e
== edge_in
);
2758 reinstall_phi_args (new_edge
, e
);
2764 /* Verify properties of the address expression T with base object BASE. */
2767 verify_address (tree t
, tree base
)
2770 bool old_side_effects
;
2772 bool new_side_effects
;
2774 old_constant
= TREE_CONSTANT (t
);
2775 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2777 recompute_tree_invariant_for_addr_expr (t
);
2778 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2779 new_constant
= TREE_CONSTANT (t
);
2781 if (old_constant
!= new_constant
)
2783 error ("constant not recomputed when ADDR_EXPR changed");
2786 if (old_side_effects
!= new_side_effects
)
2788 error ("side effects not recomputed when ADDR_EXPR changed");
2792 if (!(TREE_CODE (base
) == VAR_DECL
2793 || TREE_CODE (base
) == PARM_DECL
2794 || TREE_CODE (base
) == RESULT_DECL
))
2797 if (DECL_GIMPLE_REG_P (base
))
2799 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2806 /* Callback for walk_tree, check that all elements with address taken are
2807 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2808 inside a PHI node. */
2811 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2818 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2819 #define CHECK_OP(N, MSG) \
2820 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2821 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2823 switch (TREE_CODE (t
))
2826 if (SSA_NAME_IN_FREE_LIST (t
))
2828 error ("SSA name in freelist but still referenced");
2837 tree context
= decl_function_context (t
);
2838 if (context
!= cfun
->decl
2839 && !SCOPE_FILE_SCOPE_P (context
)
2841 && !DECL_EXTERNAL (t
))
2843 error ("Local declaration from a different function");
2850 error ("INDIRECT_REF in gimple IL");
2854 x
= TREE_OPERAND (t
, 0);
2855 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2856 || !is_gimple_mem_ref_addr (x
))
2858 error ("invalid first operand of MEM_REF");
2861 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2862 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2864 error ("invalid offset operand of MEM_REF");
2865 return TREE_OPERAND (t
, 1);
2867 if (TREE_CODE (x
) == ADDR_EXPR
)
2869 tree va
= verify_address (x
, TREE_OPERAND (x
, 0));
2872 x
= TREE_OPERAND (x
, 0);
2874 walk_tree (&x
, verify_expr
, data
, NULL
);
2879 x
= fold (ASSERT_EXPR_COND (t
));
2880 if (x
== boolean_false_node
)
2882 error ("ASSERT_EXPR with an always-false condition");
2888 error ("MODIFY_EXPR not expected while having tuples");
2895 gcc_assert (is_gimple_address (t
));
2897 /* Skip any references (they will be checked when we recurse down the
2898 tree) and ensure that any variable used as a prefix is marked
2900 for (x
= TREE_OPERAND (t
, 0);
2901 handled_component_p (x
);
2902 x
= TREE_OPERAND (x
, 0))
2905 if ((tem
= verify_address (t
, x
)))
2908 if (!(TREE_CODE (x
) == VAR_DECL
2909 || TREE_CODE (x
) == PARM_DECL
2910 || TREE_CODE (x
) == RESULT_DECL
))
2913 if (!TREE_ADDRESSABLE (x
))
2915 error ("address taken, but ADDRESSABLE bit not set");
2923 x
= COND_EXPR_COND (t
);
2924 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2926 error ("non-integral used in condition");
2929 if (!is_gimple_condexpr (x
))
2931 error ("invalid conditional operand");
2936 case NON_LVALUE_EXPR
:
2937 case TRUTH_NOT_EXPR
:
2941 case FIX_TRUNC_EXPR
:
2946 CHECK_OP (0, "invalid operand to unary operator");
2952 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2954 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2958 if (TREE_CODE (t
) == BIT_FIELD_REF
)
2960 tree t0
= TREE_OPERAND (t
, 0);
2961 tree t1
= TREE_OPERAND (t
, 1);
2962 tree t2
= TREE_OPERAND (t
, 2);
2963 if (!tree_fits_uhwi_p (t1
)
2964 || !tree_fits_uhwi_p (t2
))
2966 error ("invalid position or size operand to BIT_FIELD_REF");
2969 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
2970 && (TYPE_PRECISION (TREE_TYPE (t
))
2971 != tree_to_uhwi (t1
)))
2973 error ("integral result type precision does not match "
2974 "field size of BIT_FIELD_REF");
2977 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
2978 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
2979 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t
)))
2980 != tree_to_uhwi (t1
)))
2982 error ("mode size of non-integral result does not "
2983 "match field size of BIT_FIELD_REF");
2986 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
2987 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
2988 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
2990 error ("position plus size exceeds size of referenced object in "
2995 t
= TREE_OPERAND (t
, 0);
3000 case ARRAY_RANGE_REF
:
3001 case VIEW_CONVERT_EXPR
:
3002 /* We have a nest of references. Verify that each of the operands
3003 that determine where to reference is either a constant or a variable,
3004 verify that the base is valid, and then show we've already checked
3006 while (handled_component_p (t
))
3008 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
3009 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3010 else if (TREE_CODE (t
) == ARRAY_REF
3011 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
3013 CHECK_OP (1, "invalid array index");
3014 if (TREE_OPERAND (t
, 2))
3015 CHECK_OP (2, "invalid array lower bound");
3016 if (TREE_OPERAND (t
, 3))
3017 CHECK_OP (3, "invalid array stride");
3019 else if (TREE_CODE (t
) == BIT_FIELD_REF
3020 || TREE_CODE (t
) == REALPART_EXPR
3021 || TREE_CODE (t
) == IMAGPART_EXPR
)
3023 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3028 t
= TREE_OPERAND (t
, 0);
3031 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
3033 error ("invalid reference prefix");
3036 walk_tree (&t
, verify_expr
, data
, NULL
);
3041 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3042 POINTER_PLUS_EXPR. */
3043 if (POINTER_TYPE_P (TREE_TYPE (t
)))
3045 error ("invalid operand to plus/minus, type is a pointer");
3048 CHECK_OP (0, "invalid operand to binary operator");
3049 CHECK_OP (1, "invalid operand to binary operator");
3052 case POINTER_PLUS_EXPR
:
3053 /* Check to make sure the first operand is a pointer or reference type. */
3054 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
3056 error ("invalid operand to pointer plus, first operand is not a pointer");
3059 /* Check to make sure the second operand is a ptrofftype. */
3060 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
3062 error ("invalid operand to pointer plus, second operand is not an "
3063 "integer type of appropriate width");
3073 case UNORDERED_EXPR
:
3082 case TRUNC_DIV_EXPR
:
3084 case FLOOR_DIV_EXPR
:
3085 case ROUND_DIV_EXPR
:
3086 case TRUNC_MOD_EXPR
:
3088 case FLOOR_MOD_EXPR
:
3089 case ROUND_MOD_EXPR
:
3091 case EXACT_DIV_EXPR
:
3101 CHECK_OP (0, "invalid operand to binary operator");
3102 CHECK_OP (1, "invalid operand to binary operator");
3106 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
3110 case CASE_LABEL_EXPR
:
3113 error ("invalid CASE_CHAIN");
3127 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3128 Returns true if there is an error, otherwise false. */
3131 verify_types_in_gimple_min_lval (tree expr
)
3135 if (is_gimple_id (expr
))
3138 if (TREE_CODE (expr
) != TARGET_MEM_REF
3139 && TREE_CODE (expr
) != MEM_REF
)
3141 error ("invalid expression for min lvalue");
3145 /* TARGET_MEM_REFs are strange beasts. */
3146 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3149 op
= TREE_OPERAND (expr
, 0);
3150 if (!is_gimple_val (op
))
3152 error ("invalid operand in indirect reference");
3153 debug_generic_stmt (op
);
3156 /* Memory references now generally can involve a value conversion. */
3161 /* Verify if EXPR is a valid GIMPLE reference expression. If
3162 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3163 if there is an error, otherwise false. */
3166 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
3168 while (handled_component_p (expr
))
3170 tree op
= TREE_OPERAND (expr
, 0);
3172 if (TREE_CODE (expr
) == ARRAY_REF
3173 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3175 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3176 || (TREE_OPERAND (expr
, 2)
3177 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3178 || (TREE_OPERAND (expr
, 3)
3179 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3181 error ("invalid operands to array reference");
3182 debug_generic_stmt (expr
);
3187 /* Verify if the reference array element types are compatible. */
3188 if (TREE_CODE (expr
) == ARRAY_REF
3189 && !useless_type_conversion_p (TREE_TYPE (expr
),
3190 TREE_TYPE (TREE_TYPE (op
))))
3192 error ("type mismatch in array reference");
3193 debug_generic_stmt (TREE_TYPE (expr
));
3194 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3197 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3198 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3199 TREE_TYPE (TREE_TYPE (op
))))
3201 error ("type mismatch in array range reference");
3202 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3203 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3207 if ((TREE_CODE (expr
) == REALPART_EXPR
3208 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3209 && !useless_type_conversion_p (TREE_TYPE (expr
),
3210 TREE_TYPE (TREE_TYPE (op
))))
3212 error ("type mismatch in real/imagpart reference");
3213 debug_generic_stmt (TREE_TYPE (expr
));
3214 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3218 if (TREE_CODE (expr
) == COMPONENT_REF
3219 && !useless_type_conversion_p (TREE_TYPE (expr
),
3220 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3222 error ("type mismatch in component reference");
3223 debug_generic_stmt (TREE_TYPE (expr
));
3224 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3228 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3230 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3231 that their operand is not an SSA name or an invariant when
3232 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3233 bug). Otherwise there is nothing to verify, gross mismatches at
3234 most invoke undefined behavior. */
3236 && (TREE_CODE (op
) == SSA_NAME
3237 || is_gimple_min_invariant (op
)))
3239 error ("conversion of an SSA_NAME on the left hand side");
3240 debug_generic_stmt (expr
);
3243 else if (TREE_CODE (op
) == SSA_NAME
3244 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3246 error ("conversion of register to a different size");
3247 debug_generic_stmt (expr
);
3250 else if (!handled_component_p (op
))
3257 if (TREE_CODE (expr
) == MEM_REF
)
3259 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3261 error ("invalid address operand in MEM_REF");
3262 debug_generic_stmt (expr
);
3265 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3266 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3268 error ("invalid offset operand in MEM_REF");
3269 debug_generic_stmt (expr
);
3273 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3275 if (!TMR_BASE (expr
)
3276 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3278 error ("invalid address operand in TARGET_MEM_REF");
3281 if (!TMR_OFFSET (expr
)
3282 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3283 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3285 error ("invalid offset operand in TARGET_MEM_REF");
3286 debug_generic_stmt (expr
);
3291 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3292 && verify_types_in_gimple_min_lval (expr
));
3295 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3296 list of pointer-to types that is trivially convertible to DEST. */
3299 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3303 if (!TYPE_POINTER_TO (src_obj
))
3306 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3307 if (useless_type_conversion_p (dest
, src
))
3313 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3314 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3317 valid_fixed_convert_types_p (tree type1
, tree type2
)
3319 return (FIXED_POINT_TYPE_P (type1
)
3320 && (INTEGRAL_TYPE_P (type2
)
3321 || SCALAR_FLOAT_TYPE_P (type2
)
3322 || FIXED_POINT_TYPE_P (type2
)));
3325 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3326 is a problem, otherwise false. */
3329 verify_gimple_call (gcall
*stmt
)
3331 tree fn
= gimple_call_fn (stmt
);
3332 tree fntype
, fndecl
;
3335 if (gimple_call_internal_p (stmt
))
3339 error ("gimple call has two targets");
3340 debug_generic_stmt (fn
);
3348 error ("gimple call has no target");
3353 if (fn
&& !is_gimple_call_addr (fn
))
3355 error ("invalid function in gimple call");
3356 debug_generic_stmt (fn
);
3361 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3362 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3363 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3365 error ("non-function in gimple call");
3369 fndecl
= gimple_call_fndecl (stmt
);
3371 && TREE_CODE (fndecl
) == FUNCTION_DECL
3372 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3373 && !DECL_PURE_P (fndecl
)
3374 && !TREE_READONLY (fndecl
))
3376 error ("invalid pure const state for function");
3380 tree lhs
= gimple_call_lhs (stmt
);
3382 && (!is_gimple_lvalue (lhs
)
3383 || verify_types_in_gimple_reference (lhs
, true)))
3385 error ("invalid LHS in gimple call");
3389 if (gimple_call_ctrl_altering_p (stmt
)
3390 && gimple_call_noreturn_p (stmt
)
3391 && should_remove_lhs_p (lhs
))
3393 error ("LHS in noreturn call");
3397 fntype
= gimple_call_fntype (stmt
);
3400 && !useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (fntype
))
3401 /* ??? At least C++ misses conversions at assignments from
3402 void * call results.
3403 ??? Java is completely off. Especially with functions
3404 returning java.lang.Object.
3405 For now simply allow arbitrary pointer type conversions. */
3406 && !(POINTER_TYPE_P (TREE_TYPE (lhs
))
3407 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3409 error ("invalid conversion in gimple call");
3410 debug_generic_stmt (TREE_TYPE (lhs
));
3411 debug_generic_stmt (TREE_TYPE (fntype
));
3415 if (gimple_call_chain (stmt
)
3416 && !is_gimple_val (gimple_call_chain (stmt
)))
3418 error ("invalid static chain in gimple call");
3419 debug_generic_stmt (gimple_call_chain (stmt
));
3423 /* If there is a static chain argument, the call should either be
3424 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3425 if (gimple_call_chain (stmt
)
3427 && !DECL_STATIC_CHAIN (fndecl
))
3429 error ("static chain with function that doesn%'t use one");
3433 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3435 switch (DECL_FUNCTION_CODE (fndecl
))
3437 case BUILT_IN_UNREACHABLE
:
3439 if (gimple_call_num_args (stmt
) > 0)
3441 /* Built-in unreachable with parameters might not be caught by
3442 undefined behavior sanitizer. Front-ends do check users do not
3443 call them that way but we also produce calls to
3444 __builtin_unreachable internally, for example when IPA figures
3445 out a call cannot happen in a legal program. In such cases,
3446 we must make sure arguments are stripped off. */
3447 error ("__builtin_unreachable or __builtin_trap call with "
3457 /* ??? The C frontend passes unpromoted arguments in case it
3458 didn't see a function declaration before the call. So for now
3459 leave the call arguments mostly unverified. Once we gimplify
3460 unit-at-a-time we have a chance to fix this. */
3462 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3464 tree arg
= gimple_call_arg (stmt
, i
);
3465 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3466 && !is_gimple_val (arg
))
3467 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3468 && !is_gimple_lvalue (arg
)))
3470 error ("invalid argument to gimple call");
3471 debug_generic_expr (arg
);
3479 /* Verifies the gimple comparison with the result type TYPE and
3480 the operands OP0 and OP1, comparison code is CODE. */
3483 verify_gimple_comparison (tree type
, tree op0
, tree op1
, enum tree_code code
)
3485 tree op0_type
= TREE_TYPE (op0
);
3486 tree op1_type
= TREE_TYPE (op1
);
3488 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3490 error ("invalid operands in gimple comparison");
3494 /* For comparisons we do not have the operations type as the
3495 effective type the comparison is carried out in. Instead
3496 we require that either the first operand is trivially
3497 convertible into the second, or the other way around.
3498 Because we special-case pointers to void we allow
3499 comparisons of pointers with the same mode as well. */
3500 if (!useless_type_conversion_p (op0_type
, op1_type
)
3501 && !useless_type_conversion_p (op1_type
, op0_type
)
3502 && (!POINTER_TYPE_P (op0_type
)
3503 || !POINTER_TYPE_P (op1_type
)
3504 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3506 error ("mismatching comparison operand types");
3507 debug_generic_expr (op0_type
);
3508 debug_generic_expr (op1_type
);
3512 /* The resulting type of a comparison may be an effective boolean type. */
3513 if (INTEGRAL_TYPE_P (type
)
3514 && (TREE_CODE (type
) == BOOLEAN_TYPE
3515 || TYPE_PRECISION (type
) == 1))
3517 if ((TREE_CODE (op0_type
) == VECTOR_TYPE
3518 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3519 && code
!= EQ_EXPR
&& code
!= NE_EXPR
3520 && !VECTOR_BOOLEAN_TYPE_P (op0_type
)
3521 && !VECTOR_INTEGER_TYPE_P (op0_type
))
3523 error ("unsupported operation or type for vector comparison"
3524 " returning a boolean");
3525 debug_generic_expr (op0_type
);
3526 debug_generic_expr (op1_type
);
3530 /* Or a boolean vector type with the same element count
3531 as the comparison operand types. */
3532 else if (TREE_CODE (type
) == VECTOR_TYPE
3533 && TREE_CODE (TREE_TYPE (type
)) == BOOLEAN_TYPE
)
3535 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3536 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3538 error ("non-vector operands in vector comparison");
3539 debug_generic_expr (op0_type
);
3540 debug_generic_expr (op1_type
);
3544 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
))
3546 error ("invalid vector comparison resulting type");
3547 debug_generic_expr (type
);
3553 error ("bogus comparison result type");
3554 debug_generic_expr (type
);
3561 /* Verify a gimple assignment statement STMT with an unary rhs.
3562 Returns true if anything is wrong. */
3565 verify_gimple_assign_unary (gassign
*stmt
)
3567 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3568 tree lhs
= gimple_assign_lhs (stmt
);
3569 tree lhs_type
= TREE_TYPE (lhs
);
3570 tree rhs1
= gimple_assign_rhs1 (stmt
);
3571 tree rhs1_type
= TREE_TYPE (rhs1
);
3573 if (!is_gimple_reg (lhs
))
3575 error ("non-register as LHS of unary operation");
3579 if (!is_gimple_val (rhs1
))
3581 error ("invalid operand in unary operation");
3585 /* First handle conversions. */
3590 /* Allow conversions from pointer type to integral type only if
3591 there is no sign or zero extension involved.
3592 For targets were the precision of ptrofftype doesn't match that
3593 of pointers we need to allow arbitrary conversions to ptrofftype. */
3594 if ((POINTER_TYPE_P (lhs_type
)
3595 && INTEGRAL_TYPE_P (rhs1_type
))
3596 || (POINTER_TYPE_P (rhs1_type
)
3597 && INTEGRAL_TYPE_P (lhs_type
)
3598 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3599 || ptrofftype_p (sizetype
))))
3602 /* Allow conversion from integral to offset type and vice versa. */
3603 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3604 && INTEGRAL_TYPE_P (rhs1_type
))
3605 || (INTEGRAL_TYPE_P (lhs_type
)
3606 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3609 /* Otherwise assert we are converting between types of the
3611 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3613 error ("invalid types in nop conversion");
3614 debug_generic_expr (lhs_type
);
3615 debug_generic_expr (rhs1_type
);
3622 case ADDR_SPACE_CONVERT_EXPR
:
3624 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3625 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3626 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3628 error ("invalid types in address space conversion");
3629 debug_generic_expr (lhs_type
);
3630 debug_generic_expr (rhs1_type
);
3637 case FIXED_CONVERT_EXPR
:
3639 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3640 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3642 error ("invalid types in fixed-point conversion");
3643 debug_generic_expr (lhs_type
);
3644 debug_generic_expr (rhs1_type
);
3653 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3654 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3655 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3657 error ("invalid types in conversion to floating point");
3658 debug_generic_expr (lhs_type
);
3659 debug_generic_expr (rhs1_type
);
3666 case FIX_TRUNC_EXPR
:
3668 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3669 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3670 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3672 error ("invalid types in conversion to integer");
3673 debug_generic_expr (lhs_type
);
3674 debug_generic_expr (rhs1_type
);
3680 case REDUC_MAX_EXPR
:
3681 case REDUC_MIN_EXPR
:
3682 case REDUC_PLUS_EXPR
:
3683 if (!VECTOR_TYPE_P (rhs1_type
)
3684 || !useless_type_conversion_p (lhs_type
, TREE_TYPE (rhs1_type
)))
3686 error ("reduction should convert from vector to element type");
3687 debug_generic_expr (lhs_type
);
3688 debug_generic_expr (rhs1_type
);
3693 case VEC_UNPACK_HI_EXPR
:
3694 case VEC_UNPACK_LO_EXPR
:
3695 case VEC_UNPACK_FLOAT_HI_EXPR
:
3696 case VEC_UNPACK_FLOAT_LO_EXPR
:
3711 /* For the remaining codes assert there is no conversion involved. */
3712 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3714 error ("non-trivial conversion in unary operation");
3715 debug_generic_expr (lhs_type
);
3716 debug_generic_expr (rhs1_type
);
3723 /* Verify a gimple assignment statement STMT with a binary rhs.
3724 Returns true if anything is wrong. */
3727 verify_gimple_assign_binary (gassign
*stmt
)
3729 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3730 tree lhs
= gimple_assign_lhs (stmt
);
3731 tree lhs_type
= TREE_TYPE (lhs
);
3732 tree rhs1
= gimple_assign_rhs1 (stmt
);
3733 tree rhs1_type
= TREE_TYPE (rhs1
);
3734 tree rhs2
= gimple_assign_rhs2 (stmt
);
3735 tree rhs2_type
= TREE_TYPE (rhs2
);
3737 if (!is_gimple_reg (lhs
))
3739 error ("non-register as LHS of binary operation");
3743 if (!is_gimple_val (rhs1
)
3744 || !is_gimple_val (rhs2
))
3746 error ("invalid operands in binary operation");
3750 /* First handle operations that involve different types. */
3755 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3756 || !(INTEGRAL_TYPE_P (rhs1_type
)
3757 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3758 || !(INTEGRAL_TYPE_P (rhs2_type
)
3759 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3761 error ("type mismatch in complex expression");
3762 debug_generic_expr (lhs_type
);
3763 debug_generic_expr (rhs1_type
);
3764 debug_generic_expr (rhs2_type
);
3776 /* Shifts and rotates are ok on integral types, fixed point
3777 types and integer vector types. */
3778 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3779 && !FIXED_POINT_TYPE_P (rhs1_type
)
3780 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3781 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3782 || (!INTEGRAL_TYPE_P (rhs2_type
)
3783 /* Vector shifts of vectors are also ok. */
3784 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3785 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3786 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3787 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3788 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3790 error ("type mismatch in shift expression");
3791 debug_generic_expr (lhs_type
);
3792 debug_generic_expr (rhs1_type
);
3793 debug_generic_expr (rhs2_type
);
3800 case WIDEN_LSHIFT_EXPR
:
3802 if (!INTEGRAL_TYPE_P (lhs_type
)
3803 || !INTEGRAL_TYPE_P (rhs1_type
)
3804 || TREE_CODE (rhs2
) != INTEGER_CST
3805 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3807 error ("type mismatch in widening vector shift expression");
3808 debug_generic_expr (lhs_type
);
3809 debug_generic_expr (rhs1_type
);
3810 debug_generic_expr (rhs2_type
);
3817 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3818 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3820 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3821 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3822 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3823 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3824 || TREE_CODE (rhs2
) != INTEGER_CST
3825 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3826 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3828 error ("type mismatch in widening vector shift expression");
3829 debug_generic_expr (lhs_type
);
3830 debug_generic_expr (rhs1_type
);
3831 debug_generic_expr (rhs2_type
);
3841 tree lhs_etype
= lhs_type
;
3842 tree rhs1_etype
= rhs1_type
;
3843 tree rhs2_etype
= rhs2_type
;
3844 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3846 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3847 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3849 error ("invalid non-vector operands to vector valued plus");
3852 lhs_etype
= TREE_TYPE (lhs_type
);
3853 rhs1_etype
= TREE_TYPE (rhs1_type
);
3854 rhs2_etype
= TREE_TYPE (rhs2_type
);
3856 if (POINTER_TYPE_P (lhs_etype
)
3857 || POINTER_TYPE_P (rhs1_etype
)
3858 || POINTER_TYPE_P (rhs2_etype
))
3860 error ("invalid (pointer) operands to plus/minus");
3864 /* Continue with generic binary expression handling. */
3868 case POINTER_PLUS_EXPR
:
3870 if (!POINTER_TYPE_P (rhs1_type
)
3871 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3872 || !ptrofftype_p (rhs2_type
))
3874 error ("type mismatch in pointer plus expression");
3875 debug_generic_stmt (lhs_type
);
3876 debug_generic_stmt (rhs1_type
);
3877 debug_generic_stmt (rhs2_type
);
3884 case TRUTH_ANDIF_EXPR
:
3885 case TRUTH_ORIF_EXPR
:
3886 case TRUTH_AND_EXPR
:
3888 case TRUTH_XOR_EXPR
:
3898 case UNORDERED_EXPR
:
3906 /* Comparisons are also binary, but the result type is not
3907 connected to the operand types. */
3908 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
, rhs_code
);
3910 case WIDEN_MULT_EXPR
:
3911 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3913 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3914 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3916 case WIDEN_SUM_EXPR
:
3917 case VEC_WIDEN_MULT_HI_EXPR
:
3918 case VEC_WIDEN_MULT_LO_EXPR
:
3919 case VEC_WIDEN_MULT_EVEN_EXPR
:
3920 case VEC_WIDEN_MULT_ODD_EXPR
:
3921 case VEC_PACK_TRUNC_EXPR
:
3922 case VEC_PACK_SAT_EXPR
:
3923 case VEC_PACK_FIX_TRUNC_EXPR
:
3928 case MULT_HIGHPART_EXPR
:
3929 case TRUNC_DIV_EXPR
:
3931 case FLOOR_DIV_EXPR
:
3932 case ROUND_DIV_EXPR
:
3933 case TRUNC_MOD_EXPR
:
3935 case FLOOR_MOD_EXPR
:
3936 case ROUND_MOD_EXPR
:
3938 case EXACT_DIV_EXPR
:
3944 /* Continue with generic binary expression handling. */
3951 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3952 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3954 error ("type mismatch in binary expression");
3955 debug_generic_stmt (lhs_type
);
3956 debug_generic_stmt (rhs1_type
);
3957 debug_generic_stmt (rhs2_type
);
3964 /* Verify a gimple assignment statement STMT with a ternary rhs.
3965 Returns true if anything is wrong. */
3968 verify_gimple_assign_ternary (gassign
*stmt
)
3970 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3971 tree lhs
= gimple_assign_lhs (stmt
);
3972 tree lhs_type
= TREE_TYPE (lhs
);
3973 tree rhs1
= gimple_assign_rhs1 (stmt
);
3974 tree rhs1_type
= TREE_TYPE (rhs1
);
3975 tree rhs2
= gimple_assign_rhs2 (stmt
);
3976 tree rhs2_type
= TREE_TYPE (rhs2
);
3977 tree rhs3
= gimple_assign_rhs3 (stmt
);
3978 tree rhs3_type
= TREE_TYPE (rhs3
);
3980 if (!is_gimple_reg (lhs
))
3982 error ("non-register as LHS of ternary operation");
3986 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
3987 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
3988 || !is_gimple_val (rhs2
)
3989 || !is_gimple_val (rhs3
))
3991 error ("invalid operands in ternary operation");
3995 /* First handle operations that involve different types. */
3998 case WIDEN_MULT_PLUS_EXPR
:
3999 case WIDEN_MULT_MINUS_EXPR
:
4000 if ((!INTEGRAL_TYPE_P (rhs1_type
)
4001 && !FIXED_POINT_TYPE_P (rhs1_type
))
4002 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
4003 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4004 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
4005 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
4007 error ("type mismatch in widening multiply-accumulate expression");
4008 debug_generic_expr (lhs_type
);
4009 debug_generic_expr (rhs1_type
);
4010 debug_generic_expr (rhs2_type
);
4011 debug_generic_expr (rhs3_type
);
4017 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4018 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
4019 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4021 error ("type mismatch in fused multiply-add expression");
4022 debug_generic_expr (lhs_type
);
4023 debug_generic_expr (rhs1_type
);
4024 debug_generic_expr (rhs2_type
);
4025 debug_generic_expr (rhs3_type
);
4031 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type
)
4032 || TYPE_VECTOR_SUBPARTS (rhs1_type
)
4033 != TYPE_VECTOR_SUBPARTS (lhs_type
))
4035 error ("the first argument of a VEC_COND_EXPR must be of a "
4036 "boolean vector type of the same number of elements "
4038 debug_generic_expr (lhs_type
);
4039 debug_generic_expr (rhs1_type
);
4044 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
4045 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4047 error ("type mismatch in conditional expression");
4048 debug_generic_expr (lhs_type
);
4049 debug_generic_expr (rhs2_type
);
4050 debug_generic_expr (rhs3_type
);
4056 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4057 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4059 error ("type mismatch in vector permute expression");
4060 debug_generic_expr (lhs_type
);
4061 debug_generic_expr (rhs1_type
);
4062 debug_generic_expr (rhs2_type
);
4063 debug_generic_expr (rhs3_type
);
4067 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4068 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4069 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4071 error ("vector types expected in vector permute expression");
4072 debug_generic_expr (lhs_type
);
4073 debug_generic_expr (rhs1_type
);
4074 debug_generic_expr (rhs2_type
);
4075 debug_generic_expr (rhs3_type
);
4079 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
4080 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
4081 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
4082 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
4083 != TYPE_VECTOR_SUBPARTS (lhs_type
))
4085 error ("vectors with different element number found "
4086 "in vector permute expression");
4087 debug_generic_expr (lhs_type
);
4088 debug_generic_expr (rhs1_type
);
4089 debug_generic_expr (rhs2_type
);
4090 debug_generic_expr (rhs3_type
);
4094 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
4095 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
4096 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
4098 error ("invalid mask type in vector permute expression");
4099 debug_generic_expr (lhs_type
);
4100 debug_generic_expr (rhs1_type
);
4101 debug_generic_expr (rhs2_type
);
4102 debug_generic_expr (rhs3_type
);
4109 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
)
4110 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4111 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
)))
4112 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type
))))
4114 error ("type mismatch in sad expression");
4115 debug_generic_expr (lhs_type
);
4116 debug_generic_expr (rhs1_type
);
4117 debug_generic_expr (rhs2_type
);
4118 debug_generic_expr (rhs3_type
);
4122 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4123 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4124 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4126 error ("vector types expected in sad expression");
4127 debug_generic_expr (lhs_type
);
4128 debug_generic_expr (rhs1_type
);
4129 debug_generic_expr (rhs2_type
);
4130 debug_generic_expr (rhs3_type
);
4136 case BIT_INSERT_EXPR
:
4137 if (! useless_type_conversion_p (lhs_type
, rhs1_type
))
4139 error ("type mismatch in BIT_INSERT_EXPR");
4140 debug_generic_expr (lhs_type
);
4141 debug_generic_expr (rhs1_type
);
4144 if (! ((INTEGRAL_TYPE_P (rhs1_type
)
4145 && INTEGRAL_TYPE_P (rhs2_type
))
4146 || (VECTOR_TYPE_P (rhs1_type
)
4147 && types_compatible_p (TREE_TYPE (rhs1_type
), rhs2_type
))))
4149 error ("not allowed type combination in BIT_INSERT_EXPR");
4150 debug_generic_expr (rhs1_type
);
4151 debug_generic_expr (rhs2_type
);
4154 if (! tree_fits_uhwi_p (rhs3
)
4155 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type
)))
4157 error ("invalid position or size in BIT_INSERT_EXPR");
4160 if (INTEGRAL_TYPE_P (rhs1_type
))
4162 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4163 if (bitpos
>= TYPE_PRECISION (rhs1_type
)
4164 || (bitpos
+ TYPE_PRECISION (rhs2_type
)
4165 > TYPE_PRECISION (rhs1_type
)))
4167 error ("insertion out of range in BIT_INSERT_EXPR");
4171 else if (VECTOR_TYPE_P (rhs1_type
))
4173 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4174 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (TYPE_SIZE (rhs2_type
));
4175 if (bitpos
% bitsize
!= 0)
4177 error ("vector insertion not at element boundary");
4184 case REALIGN_LOAD_EXPR
:
4194 /* Verify a gimple assignment statement STMT with a single rhs.
4195 Returns true if anything is wrong. */
/* Verify a GIMPLE assignment with a single rhs: the LHS/RHS types
   must agree trivially, clobbers must target a decl or MEM_REF, and
   memory references on either side are checked via
   verify_types_in_gimple_reference.  Emits error () on problems.
   NOTE(review): garbled extraction -- switch header, braces and some
   case labels are missing; verify against upstream tree-cfg.c.  */
4198 verify_gimple_assign_single (gassign
*stmt
)
4200 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4201 tree lhs
= gimple_assign_lhs (stmt
);
4202 tree lhs_type
= TREE_TYPE (lhs
);
4203 tree rhs1
= gimple_assign_rhs1 (stmt
);
4204 tree rhs1_type
= TREE_TYPE (rhs1
);
/* A single-rhs assignment is a plain copy: no implicit conversion
   is allowed between the two sides.  */
4207 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4209 error ("non-trivial conversion at assignment");
4210 debug_generic_expr (lhs_type
);
4211 debug_generic_expr (rhs1_type
);
/* Clobber statements may only kill whole decls or MEM_REFs.  */
4215 if (gimple_clobber_p (stmt
)
4216 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4218 error ("non-decl/MEM_REF LHS in clobber statement");
4219 debug_generic_expr (lhs
);
/* If the LHS is a memory reference, type-check the reference chain.  */
4223 if (handled_component_p (lhs
)
4224 || TREE_CODE (lhs
) == MEM_REF
4225 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4226 res
|= verify_types_in_gimple_reference (lhs
, true);
4228 /* Special codes we cannot handle via their class. */
/* ADDR_EXPR handling: the operand must be addressable and its type
   must be compatible with what the pointer type points to.  */
4233 tree op
= TREE_OPERAND (rhs1
, 0);
4234 if (!is_gimple_addressable (op
))
4236 error ("invalid operand in unary expression");
4240 /* Technically there is no longer a need for matching types, but
4241 gimple hygiene asks for this check. In LTO we can end up
4242 combining incompatible units and thus end up with addresses
4243 of globals that change their type to a common one. */
4245 && !types_compatible_p (TREE_TYPE (op
),
4246 TREE_TYPE (TREE_TYPE (rhs1
)))
4247 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4250 error ("type mismatch in address expression");
4251 debug_generic_stmt (TREE_TYPE (rhs1
));
4252 debug_generic_stmt (TREE_TYPE (op
));
4256 return verify_types_in_gimple_reference (op
, true);
/* Bare INDIRECT_REF must have been lowered to MEM_REF in GIMPLE.  */
4261 error ("INDIRECT_REF in gimple IL");
/* Memory-reference rhs codes: a register-typed LHS must itself be a
   register; otherwise check the reference chain of the rhs.  */
4267 case ARRAY_RANGE_REF
:
4268 case VIEW_CONVERT_EXPR
:
4271 case TARGET_MEM_REF
:
4273 if (!is_gimple_reg (lhs
)
4274 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4276 error ("invalid rhs for gimple memory store");
4277 debug_generic_stmt (lhs
);
4278 debug_generic_stmt (rhs1
);
4281 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4293 /* tcc_declaration */
/* Register-to-register or memory-to-memory copies of register types
   are fine; reg-typed memory store from memory is not.  */
4298 if (!is_gimple_reg (lhs
)
4299 && !is_gimple_reg (rhs1
)
4300 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4302 error ("invalid rhs for gimple memory store");
4303 debug_generic_stmt (lhs
);
4304 debug_generic_stmt (rhs1
);
/* CONSTRUCTOR rhs: vector constructors get detailed element checks;
   non-vector constructors must be empty in GIMPLE.  */
4310 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
4313 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4315 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4317 /* For vector CONSTRUCTORs we require that either it is empty
4318 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4319 (then the element count must be correct to cover the whole
4320 outer vector and index must be NULL on all elements, or it is
4321 a CONSTRUCTOR of scalar elements, where we as an exception allow
4322 smaller number of elements (assuming zero filling) and
4323 consecutive indexes as compared to NULL indexes (such
4324 CONSTRUCTORs can appear in the IL from FEs). */
4325 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
/* Remember the type of the first element; all later elements must
   agree with it.  */
4327 if (elt_t
== NULL_TREE
)
4329 elt_t
= TREE_TYPE (elt_v
);
4330 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4332 tree elt_t
= TREE_TYPE (elt_v
);
4333 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4336 error ("incorrect type of vector CONSTRUCTOR"
4338 debug_generic_stmt (rhs1
);
4341 else if (CONSTRUCTOR_NELTS (rhs1
)
4342 * TYPE_VECTOR_SUBPARTS (elt_t
)
4343 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4345 error ("incorrect number of vector CONSTRUCTOR"
4347 debug_generic_stmt (rhs1
);
4351 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4354 error ("incorrect type of vector CONSTRUCTOR elements");
4355 debug_generic_stmt (rhs1
);
4358 else if (CONSTRUCTOR_NELTS (rhs1
)
4359 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4361 error ("incorrect number of vector CONSTRUCTOR elements");
4362 debug_generic_stmt (rhs1
);
4366 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4368 error ("incorrect type of vector CONSTRUCTOR elements");
4369 debug_generic_stmt (rhs1
);
/* Element indexes, when present, must be consecutive INTEGER_CSTs
   (and vector-of-vector constructors get no indexes at all).  */
4372 if (elt_i
!= NULL_TREE
4373 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4374 || TREE_CODE (elt_i
) != INTEGER_CST
4375 || compare_tree_int (elt_i
, i
) != 0))
4377 error ("vector CONSTRUCTOR with non-NULL element index");
4378 debug_generic_stmt (rhs1
);
4381 if (!is_gimple_val (elt_v
))
4383 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4384 debug_generic_stmt (rhs1
);
4389 else if (CONSTRUCTOR_NELTS (rhs1
) != 0)
4391 error ("non-vector CONSTRUCTOR with elements");
4392 debug_generic_stmt (rhs1
);
4398 case WITH_SIZE_EXPR
:
4408 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4409 is a problem, otherwise false. */
/* Verify the contents of a GIMPLE_ASSIGN by dispatching on its rhs
   class to the single/unary/binary/ternary checkers above.  Returns
   whatever the selected checker returns (true on a problem).  */
4412 verify_gimple_assign (gassign
*stmt
)
4414 switch (gimple_assign_rhs_class (stmt
))
4416 case GIMPLE_SINGLE_RHS
:
4417 return verify_gimple_assign_single (stmt
);
4419 case GIMPLE_UNARY_RHS
:
4420 return verify_gimple_assign_unary (stmt
);
4422 case GIMPLE_BINARY_RHS
:
4423 return verify_gimple_assign_binary (stmt
);
4425 case GIMPLE_TERNARY_RHS
:
4426 return verify_gimple_assign_ternary (stmt
);
4433 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4434 is a problem, otherwise false. */
/* Verify a GIMPLE_RETURN: the return value (if any) must be a GIMPLE
   value or the RESULT_DECL, and its type must convert trivially to
   the function's declared return type.  By-reference results are
   compared against the pointed-to type.  */
4437 verify_gimple_return (greturn
*stmt
)
4439 tree op
= gimple_return_retval (stmt
);
/* The function's declared return type, taken from cfun's decl.  */
4440 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4442 /* We cannot test for present return values as we do not fix up missing
4443 return values from the original source. */
4447 if (!is_gimple_val (op
)
4448 && TREE_CODE (op
) != RESULT_DECL
)
4450 error ("invalid operand in return statement");
4451 debug_generic_stmt (op
);
/* For DECL_BY_REFERENCE results (directly or through an SSA name of
   the RESULT_DECL) the returned value is really a pointer; compare
   the pointed-to type instead.  */
4455 if ((TREE_CODE (op
) == RESULT_DECL
4456 && DECL_BY_REFERENCE (op
))
4457 || (TREE_CODE (op
) == SSA_NAME
4458 && SSA_NAME_VAR (op
)
4459 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4460 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4461 op
= TREE_TYPE (op
);
4463 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4465 error ("invalid conversion in return statement");
4466 debug_generic_stmt (restype
);
4467 debug_generic_stmt (TREE_TYPE (op
));
4475 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4476 is a problem, otherwise false. */
/* Verify a GIMPLE_GOTO: its destination must be either a bare
   LABEL_DECL or a GIMPLE value of pointer type (computed goto).  */
4479 verify_gimple_goto (ggoto
*stmt
)
4481 tree dest
= gimple_goto_dest (stmt
);
4483 /* ??? We have two canonical forms of direct goto destinations, a
4484 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4485 if (TREE_CODE (dest
) != LABEL_DECL
4486 && (!is_gimple_val (dest
)
4487 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4489 error ("goto destination is neither a label nor a pointer");
4496 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4497 is a problem, otherwise false. */
/* Verify a GIMPLE_SWITCH: the index is a GIMPLE value of integral
   type, label 0 is the (rangeless) default, and the remaining case
   labels are present, well-formed ranges of a single consistent type,
   not wider than the index type, and sorted without overlap.
   NOTE(review): garbled extraction -- loop braces and some guard
   lines are missing; verify against upstream tree-cfg.c.  */
4500 verify_gimple_switch (gswitch
*stmt
)
4503 tree elt
, prev_upper_bound
= NULL_TREE
;
4504 tree index_type
, elt_type
= NULL_TREE
;
4506 if (!is_gimple_val (gimple_switch_index (stmt
)))
4508 error ("invalid operand to switch statement");
4509 debug_generic_stmt (gimple_switch_index (stmt
));
4513 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4514 if (! INTEGRAL_TYPE_P (index_type
))
4516 error ("non-integral type switch statement");
4517 debug_generic_expr (index_type
);
/* Label 0 is the default case: it must carry no CASE_LOW/CASE_HIGH.  */
4521 elt
= gimple_switch_label (stmt
, 0);
4522 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4524 error ("invalid default case label in switch statement");
4525 debug_generic_expr (elt
);
/* Walk the non-default labels in order.  */
4529 n
= gimple_switch_num_labels (stmt
);
4530 for (i
= 1; i
< n
; i
++)
4532 elt
= gimple_switch_label (stmt
, i
);
4534 if (! CASE_LOW (elt
))
4536 error ("invalid case label in switch statement");
4537 debug_generic_expr (elt
);
/* A range label must have CASE_LOW strictly below CASE_HIGH.  */
4541 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4543 error ("invalid case range in switch statement");
4544 debug_generic_expr (elt
);
/* All case labels must share one type (tracked in elt_type).  */
4550 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4551 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4553 error ("type mismatch for case label in switch statement");
4554 debug_generic_expr (elt
);
4560 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4561 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4563 error ("type precision mismatch in switch statement");
/* Labels must be sorted: each low bound strictly above the previous
   label's upper bound.  */
4568 if (prev_upper_bound
)
4570 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4572 error ("case labels not sorted in switch statement");
4577 prev_upper_bound
= CASE_HIGH (elt
);
4578 if (! prev_upper_bound
)
4579 prev_upper_bound
= CASE_LOW (elt
);
4585 /* Verify a gimple debug statement STMT.
4586 Returns true if anything is wrong. */
/* Verify a GIMPLE_DEBUG statement.  Intentionally checks nothing --
   see the original comment below for why.  */
4589 verify_gimple_debug (gimple
*stmt ATTRIBUTE_UNUSED
)
4591 /* There isn't much that could be wrong in a gimple debug stmt. A
4592 gimple debug bind stmt, for example, maps a tree, that's usually
4593 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4594 component or member of an aggregate type, to another tree, that
4595 can be an arbitrary expression. These stmts expand into debug
4596 insns, and are converted to debug notes by var-tracking.c. */
4600 /* Verify a gimple label statement STMT.
4601 Returns true if anything is wrong. */
/* Verify a GIMPLE_LABEL: the label decl belongs to the current
   function (unless nonlocal/forced), the label_to_block map entry
   points at the statement's own basic block, and, when the label is
   an EH landing pad, the landing-pad record points back at it.  */
4604 verify_gimple_label (glabel
*stmt
)
4606 tree decl
= gimple_label_label (stmt
);
4610 if (TREE_CODE (decl
) != LABEL_DECL
)
4612 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4613 && DECL_CONTEXT (decl
) != current_function_decl
)
4615 error ("label's context is not the current function decl");
/* Cross-check the label_to_block map against gimple_bb.  */
4619 uid
= LABEL_DECL_UID (decl
);
4622 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4624 error ("incorrect entry in label_to_block_map");
/* If the label carries an EH landing-pad number, the EH record must
   name this label as its post-landing-pad.  */
4628 uid
= EH_LANDING_PAD_NR (decl
);
4631 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4632 if (decl
!= lp
->post_landing_pad
)
4634 error ("incorrect setting of landing pad number");
4642 /* Verify a gimple cond statement STMT.
4643 Returns true if anything is wrong. */
/* Verify a GIMPLE_COND: the code must be a comparison, any true/false
   labels must be LABEL_DECLs, and the comparison itself is delegated
   to verify_gimple_comparison with a boolean result type.  */
4646 verify_gimple_cond (gcond
*stmt
)
4648 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4650 error ("invalid comparison code in gimple cond");
4653 if (!(!gimple_cond_true_label (stmt
)
4654 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4655 || !(!gimple_cond_false_label (stmt
)
4656 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4658 error ("invalid labels in gimple cond");
4662 return verify_gimple_comparison (boolean_type_node
,
4663 gimple_cond_lhs (stmt
),
4664 gimple_cond_rhs (stmt
),
4665 gimple_cond_code (stmt
));
4668 /* Verify the GIMPLE statement STMT. Returns true if there is an
4669 error, otherwise false. */
/* Verify a single GIMPLE statement by dispatching on its code to the
   per-kind checkers.  Returns true when the statement is invalid.
   NOTE(review): garbled extraction -- most case labels before the
   return statements were dropped; verify against upstream.  */
4672 verify_gimple_stmt (gimple
*stmt
)
4674 switch (gimple_code (stmt
))
4677 return verify_gimple_assign (as_a
<gassign
*> (stmt
));
4680 return verify_gimple_label (as_a
<glabel
*> (stmt
));
4683 return verify_gimple_call (as_a
<gcall
*> (stmt
));
4686 return verify_gimple_cond (as_a
<gcond
*> (stmt
));
4689 return verify_gimple_goto (as_a
<ggoto
*> (stmt
));
4692 return verify_gimple_switch (as_a
<gswitch
*> (stmt
));
4695 return verify_gimple_return (as_a
<greturn
*> (stmt
));
4700 case GIMPLE_TRANSACTION
:
4701 return verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
4703 /* Tuples that do not have tree operands. */
4705 case GIMPLE_PREDICT
:
4707 case GIMPLE_EH_DISPATCH
:
4708 case GIMPLE_EH_MUST_NOT_THROW
:
4712 /* OpenMP directives are validated by the FE and never operated
4713 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4714 non-gimple expressions when the main index variable has had
4715 its address taken. This does not affect the loop itself
4716 because the header of an GIMPLE_OMP_FOR is merely used to determine
4717 how to setup the parallel iteration. */
4721 return verify_gimple_debug (stmt
);
4728 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4729 and false otherwise. */
/* Verify a GIMPLE_PHI: the result must be an SSA name (the virtual
   operand only for the function's VOP), and every argument must be
   present, agree with the result's virtual-ness, and be a GIMPLE
   value.  With ENABLE_TYPES_CHECKING the argument types must also
   convert trivially to the result type.  */
4732 verify_gimple_phi (gimple
*phi
)
4736 tree phi_result
= gimple_phi_result (phi
);
4741 error ("invalid PHI result");
/* Virtual PHIs and real PHIs must not mix operands.  */
4745 virtual_p
= virtual_operand_p (phi_result
);
4746 if (TREE_CODE (phi_result
) != SSA_NAME
4748 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4750 error ("invalid PHI result");
4754 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4756 tree t
= gimple_phi_arg_def (phi
, i
);
4760 error ("missing PHI def");
4764 /* Addressable variables do have SSA_NAMEs but they
4765 are not considered gimple values. */
4766 else if ((TREE_CODE (t
) == SSA_NAME
4767 && virtual_p
!= virtual_operand_p (t
))
4769 && (TREE_CODE (t
) != SSA_NAME
4770 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4772 && !is_gimple_val (t
)))
4774 error ("invalid PHI argument");
4775 debug_generic_expr (t
);
4778 #ifdef ENABLE_TYPES_CHECKING
4779 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4781 error ("incompatible types in PHI argument %u", i
);
4782 debug_generic_stmt (TREE_TYPE (phi_result
));
4783 debug_generic_stmt (TREE_TYPE (t
));
4792 /* Verify the GIMPLE statements inside the sequence STMTS. */
/* Recursively verify every statement in the sequence STMTS,
   descending into the nested bodies of binds, trys, EH constructs and
   transactions, and running verify_gimple_stmt on plain statements.
   Accumulates failures into err (returned; declaration line lost in
   the extraction).  */
4795 verify_gimple_in_seq_2 (gimple_seq stmts
)
4797 gimple_stmt_iterator ittr
;
4800 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4802 gimple
*stmt
= gsi_stmt (ittr
);
4804 switch (gimple_code (stmt
))
/* GIMPLE_BIND: recurse into the bind body.  */
4807 err
|= verify_gimple_in_seq_2 (
4808 gimple_bind_body (as_a
<gbind
*> (stmt
)));
/* GIMPLE_TRY: recurse into both the eval and cleanup sequences.  */
4812 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4813 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4816 case GIMPLE_EH_FILTER
:
4817 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4820 case GIMPLE_EH_ELSE
:
4822 geh_else
*eh_else
= as_a
<geh_else
*> (stmt
);
4823 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else
));
4824 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else
));
/* GIMPLE_CATCH: recurse into the handler.  */
4829 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (
4830 as_a
<gcatch
*> (stmt
)));
4833 case GIMPLE_TRANSACTION
:
4834 err
|= verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
/* Default: verify the statement itself and dump it on failure.  */
4839 bool err2
= verify_gimple_stmt (stmt
);
4841 debug_gimple_stmt (stmt
);
4850 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4851 is a problem, otherwise false. */
/* Verify a GIMPLE_TRANSACTION: each of the norm/uninst/over labels,
   when present, must be a LABEL_DECL; then recursively verify the
   transaction body.  */
4854 verify_gimple_transaction (gtransaction
*stmt
)
4858 lab
= gimple_transaction_label_norm (stmt
);
4859 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4861 lab
= gimple_transaction_label_uninst (stmt
);
4862 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4864 lab
= gimple_transaction_label_over (stmt
);
4865 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4868 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
4872 /* Verify the GIMPLE statements inside the statement list STMTS. */
/* Entry point: verify the statement sequence STMTS under the
   TV_TREE_STMT_VERIFY timer, aborting with internal_error on any
   verification failure.  */
4875 verify_gimple_in_seq (gimple_seq stmts
)
4877 timevar_push (TV_TREE_STMT_VERIFY
);
4878 if (verify_gimple_in_seq_2 (stmts
))
4879 internal_error ("verify_gimple failed");
4880 timevar_pop (TV_TREE_STMT_VERIFY
);
4883 /* Return true when the T can be shared. */
/* Return whether tree node T is allowed to be shared between multiple
   statements: types/decls, invariants, SSA names, error_mark_node,
   identifiers and CASE_LABEL_EXPRs are shareable.  */
4886 tree_node_can_be_shared (tree t
)
4888 if (IS_TYPE_OR_DECL_P (t
)
4889 || is_gimple_min_invariant (t
)
4890 || TREE_CODE (t
) == SSA_NAME
4891 || t
== error_mark_node
4892 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4895 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4904 /* Called via walk_tree. Verify tree sharing. */
/* walk_tree callback: flag a tree node seen twice unless it is
   legitimately shareable.  DATA is the hash_set of nodes already
   visited; shareable nodes stop the walk into their subtrees.  */
4907 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4909 hash_set
<void *> *visited
= (hash_set
<void *> *) data
;
4911 if (tree_node_can_be_shared (*tp
))
4913 *walk_subtrees
= false;
/* hash_set::add returns true when the node was already present,
   i.e. this is the second sighting of an unshareable node.  */
4917 if (visited
->add (*tp
))
4923 /* Called via walk_gimple_stmt. Verify tree sharing. */
/* walk_gimple_op adapter: unwrap the walk_stmt_info wrapper and
   forward to verify_node_sharing_1 with the visited set.  */
4926 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
4928 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4929 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
/* Set when the EH-table traversal below finds a stale entry; checked
   by verify_gimple_in_cfg.  */
4932 static bool eh_error_found
;
/* hash_map traversal callback: every statement recorded in the EH
   throw table must still exist in the IL (i.e. be in VISITED);
   otherwise report it and latch eh_error_found.  */
4934 verify_eh_throw_stmt_node (gimple
*const &stmt
, const int &,
4935 hash_set
<gimple
*> *visited
)
4937 if (!visited
->contains (stmt
))
4939 error ("dead STMT in EH table");
4940 debug_gimple_stmt (stmt
);
4941 eh_error_found
= true;
4946 /* Verify if the location LOCs block is in BLOCKS. */
/* Verify that the block attached to location LOC (and, recursively,
   the blocks of the block's own source location) is a member of
   BLOCKS, the set of blocks in the function's block tree.  */
4949 verify_location (hash_set
<tree
> *blocks
, location_t loc
)
4951 tree block
= LOCATION_BLOCK (loc
);
4952 if (block
!= NULL_TREE
4953 && !blocks
->contains (block
))
4955 error ("location references block not in block tree");
/* Recurse through the block's own source location chain.  */
4958 if (block
!= NULL_TREE
)
4959 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
4963 /* Called via walk_tree. Verify that expressions have no blocks. */
/* walk_tree callback used on DEBUG_EXPR/VALUE_EXPR trees: such
   expressions must carry no block in their locations; returning the
   offending node stops the walk (guard lines lost in extraction).  */
4966 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
4970 *walk_subtrees
= false;
4974 location_t loc
= EXPR_LOCATION (*tp
);
4975 if (LOCATION_BLOCK (loc
) != NULL
)
4981 /* Called via walk_tree. Verify locations of expressions. */
/* walk_tree callback verifying expression locations against BLOCKS
   (passed via DATA).  DEBUG_EXPRs and VALUE_EXPRs of decls must be
   block-less; other expressions' locations must reference blocks in
   the function's block tree.  */
4984 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4986 hash_set
<tree
> *blocks
= (hash_set
<tree
> *) data
;
/* A variable's DEBUG_EXPR must not carry block info.  */
4988 if (TREE_CODE (*tp
) == VAR_DECL
4989 && DECL_HAS_DEBUG_EXPR_P (*tp
))
4991 tree t
= DECL_DEBUG_EXPR (*tp
);
4992 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
/* Likewise a decl's VALUE_EXPR.  */
4996 if ((TREE_CODE (*tp
) == VAR_DECL
4997 || TREE_CODE (*tp
) == PARM_DECL
4998 || TREE_CODE (*tp
) == RESULT_DECL
)
4999 && DECL_HAS_VALUE_EXPR_P (*tp
))
5001 tree t
= DECL_VALUE_EXPR (*tp
);
5002 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
5009 *walk_subtrees
= false;
/* Ordinary expression: its location's block must be in BLOCKS.  */
5013 location_t loc
= EXPR_LOCATION (*tp
);
5014 if (verify_location (blocks
, loc
))
5020 /* Called via walk_gimple_op. Verify locations of expressions. */
/* walk_gimple_op adapter: unwrap the walk_stmt_info wrapper and
   forward to verify_expr_location_1 with the blocks set.  */
5023 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
5025 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5026 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
5029 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
/* Recursively insert every subblock of BLOCK (via BLOCK_SUBBLOCKS /
   BLOCK_CHAIN) into BLOCKS.  BLOCK itself is added by the caller.  */
5032 collect_subblocks (hash_set
<tree
> *blocks
, tree block
)
5035 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
5038 collect_subblocks (blocks
, t
);
5042 /* Verify the GIMPLE statements in the CFG of FN. */
/* Verify all GIMPLE statements in the CFG of FN: per-basic-block PHI
   and statement validity, tree-node sharing, location/block-tree
   consistency, EH-region/throw consistency (VERIFY_NOTHROW per its
   name presumably tightens the throw check -- its use is outside the
   visible lines), and finally staleness of the EH throw table.
   Aborts with internal_error on failure.
   NOTE(review): garbled extraction -- braces, err accumulation and
   several guard lines are missing; verify against upstream.  */
5045 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
)
5050 timevar_push (TV_TREE_STMT_VERIFY
);
/* Sets used for sharing / liveness bookkeeping across the walk.  */
5051 hash_set
<void *> visited
;
5052 hash_set
<gimple
*> visited_stmts
;
5054 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5055 hash_set
<tree
> blocks
;
5056 if (DECL_INITIAL (fn
->decl
))
5058 blocks
.add (DECL_INITIAL (fn
->decl
));
5059 collect_subblocks (&blocks
, DECL_INITIAL (fn
->decl
));
5062 FOR_EACH_BB_FN (bb
, fn
)
5064 gimple_stmt_iterator gsi
;
/* First pass over the block: check each PHI node.  */
5066 for (gphi_iterator gpi
= gsi_start_phis (bb
);
5070 gphi
*phi
= gpi
.phi ();
5074 visited_stmts
.add (phi
);
5076 if (gimple_bb (phi
) != bb
)
5078 error ("gimple_bb (phi) is set to a wrong basic block");
5082 err2
|= verify_gimple_phi (phi
);
5084 /* Only PHI arguments have locations. */
5085 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
5087 error ("PHI node with location");
/* Check each PHI argument for sharing and location sanity.  */
5091 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5093 tree arg
= gimple_phi_arg_def (phi
, i
);
5094 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
5098 error ("incorrect sharing of tree nodes");
5099 debug_generic_expr (addr
);
5102 location_t loc
= gimple_phi_arg_location (phi
, i
);
5103 if (virtual_operand_p (gimple_phi_result (phi
))
5104 && loc
!= UNKNOWN_LOCATION
)
5106 error ("virtual PHI with argument locations");
5109 addr
= walk_tree (&arg
, verify_expr_location_1
, &blocks
, NULL
);
5112 debug_generic_expr (addr
);
5115 err2
|= verify_location (&blocks
, loc
);
5119 debug_gimple_stmt (phi
);
/* Second pass: check each non-PHI statement.  */
5123 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5125 gimple
*stmt
= gsi_stmt (gsi
);
5127 struct walk_stmt_info wi
;
5131 visited_stmts
.add (stmt
);
5133 if (gimple_bb (stmt
) != bb
)
5135 error ("gimple_bb (stmt) is set to a wrong basic block");
5139 err2
|= verify_gimple_stmt (stmt
);
5140 err2
|= verify_location (&blocks
, gimple_location (stmt
));
/* Tree-node sharing over all statement operands.  */
5142 memset (&wi
, 0, sizeof (wi
));
5143 wi
.info
= (void *) &visited
;
5144 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
5147 error ("incorrect sharing of tree nodes");
5148 debug_generic_expr (addr
);
/* Location/block-tree checks over all statement operands.  */
5152 memset (&wi
, 0, sizeof (wi
));
5153 wi
.info
= (void *) &blocks
;
5154 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
5157 debug_generic_expr (addr
);
5161 /* ??? Instead of not checking these stmts at all the walker
5162 should know its context via wi. */
5163 if (!is_gimple_debug (stmt
)
5164 && !is_gimple_omp (stmt
))
5166 memset (&wi
, 0, sizeof (wi
));
5167 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
5170 debug_generic_expr (addr
);
5171 inform (gimple_location (stmt
), "in statement");
5176 /* If the statement is marked as part of an EH region, then it is
5177 expected that the statement could throw. Verify that when we
5178 have optimizations that simplify statements such that we prove
5179 that they cannot throw, that we update other data structures
5181 lp_nr
= lookup_stmt_eh_lp (stmt
);
5184 if (!stmt_could_throw_p (stmt
))
5188 error ("statement marked for throw, but doesn%'t");
5192 else if (!gsi_one_before_end_p (gsi
))
5194 error ("statement marked for throw in middle of block");
5200 debug_gimple_stmt (stmt
);
/* Finally, every entry in the EH throw table must refer to a
   statement that is still in the IL.  */
5205 eh_error_found
= false;
5206 hash_map
<gimple
*, int> *eh_table
= get_eh_throw_stmt_table (cfun
);
5208 eh_table
->traverse
<hash_set
<gimple
*> *, verify_eh_throw_stmt_node
>
5211 if (err
|| eh_error_found
)
5212 internal_error ("verify_gimple failed");
5214 verify_histograms ();
5215 timevar_pop (TV_TREE_STMT_VERIFY
);
5219 /* Verifies that the flow information is OK. */
5222 gimple_verify_flow_info (void)
5226 gimple_stmt_iterator gsi
;
5231 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5232 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5234 error ("ENTRY_BLOCK has IL associated with it");
5238 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5239 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5241 error ("EXIT_BLOCK has IL associated with it");
5245 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5246 if (e
->flags
& EDGE_FALLTHRU
)
5248 error ("fallthru to exit from bb %d", e
->src
->index
);
5252 FOR_EACH_BB_FN (bb
, cfun
)
5254 bool found_ctrl_stmt
= false;
5258 /* Skip labels on the start of basic block. */
5259 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5262 gimple
*prev_stmt
= stmt
;
5264 stmt
= gsi_stmt (gsi
);
5266 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5269 label
= gimple_label_label (as_a
<glabel
*> (stmt
));
5270 if (prev_stmt
&& DECL_NONLOCAL (label
))
5272 error ("nonlocal label ");
5273 print_generic_expr (stderr
, label
, 0);
5274 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5279 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5281 error ("EH landing pad label ");
5282 print_generic_expr (stderr
, label
, 0);
5283 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5288 if (label_to_block (label
) != bb
)
5291 print_generic_expr (stderr
, label
, 0);
5292 fprintf (stderr
, " to block does not match in bb %d",
5297 if (decl_function_context (label
) != current_function_decl
)
5300 print_generic_expr (stderr
, label
, 0);
5301 fprintf (stderr
, " has incorrect context in bb %d",
5307 /* Verify that body of basic block BB is free of control flow. */
5308 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5310 gimple
*stmt
= gsi_stmt (gsi
);
5312 if (found_ctrl_stmt
)
5314 error ("control flow in the middle of basic block %d",
5319 if (stmt_ends_bb_p (stmt
))
5320 found_ctrl_stmt
= true;
5322 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
5325 print_generic_expr (stderr
, gimple_label_label (label_stmt
), 0);
5326 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
5331 gsi
= gsi_last_bb (bb
);
5332 if (gsi_end_p (gsi
))
5335 stmt
= gsi_stmt (gsi
);
5337 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5340 err
|= verify_eh_edges (stmt
);
5342 if (is_ctrl_stmt (stmt
))
5344 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5345 if (e
->flags
& EDGE_FALLTHRU
)
5347 error ("fallthru edge after a control statement in bb %d",
5353 if (gimple_code (stmt
) != GIMPLE_COND
)
5355 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5356 after anything else but if statement. */
5357 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5358 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5360 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5366 switch (gimple_code (stmt
))
5373 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5377 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5378 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5379 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5380 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5381 || EDGE_COUNT (bb
->succs
) >= 3)
5383 error ("wrong outgoing edge flags at end of bb %d",
5391 if (simple_goto_p (stmt
))
5393 error ("explicit goto at end of bb %d", bb
->index
);
5398 /* FIXME. We should double check that the labels in the
5399 destination blocks have their address taken. */
5400 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5401 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5402 | EDGE_FALSE_VALUE
))
5403 || !(e
->flags
& EDGE_ABNORMAL
))
5405 error ("wrong outgoing edge flags at end of bb %d",
5413 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5415 /* ... fallthru ... */
5417 if (!single_succ_p (bb
)
5418 || (single_succ_edge (bb
)->flags
5419 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5420 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5422 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5425 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5427 error ("return edge does not point to exit in bb %d",
5435 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5440 n
= gimple_switch_num_labels (switch_stmt
);
5442 /* Mark all the destination basic blocks. */
5443 for (i
= 0; i
< n
; ++i
)
5445 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5446 basic_block label_bb
= label_to_block (lab
);
5447 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5448 label_bb
->aux
= (void *)1;
5451 /* Verify that the case labels are sorted. */
5452 prev
= gimple_switch_label (switch_stmt
, 0);
5453 for (i
= 1; i
< n
; ++i
)
5455 tree c
= gimple_switch_label (switch_stmt
, i
);
5458 error ("found default case not at the start of "
5464 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5466 error ("case labels not sorted: ");
5467 print_generic_expr (stderr
, prev
, 0);
5468 fprintf (stderr
," is greater than ");
5469 print_generic_expr (stderr
, c
, 0);
5470 fprintf (stderr
," but comes before it.\n");
5475 /* VRP will remove the default case if it can prove it will
5476 never be executed. So do not verify there always exists
5477 a default case here. */
5479 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5483 error ("extra outgoing edge %d->%d",
5484 bb
->index
, e
->dest
->index
);
5488 e
->dest
->aux
= (void *)2;
5489 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5490 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5492 error ("wrong outgoing edge flags at end of bb %d",
5498 /* Check that we have all of them. */
5499 for (i
= 0; i
< n
; ++i
)
5501 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5502 basic_block label_bb
= label_to_block (lab
);
5504 if (label_bb
->aux
!= (void *)2)
5506 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5511 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5512 e
->dest
->aux
= (void *)0;
5516 case GIMPLE_EH_DISPATCH
:
5517 err
|= verify_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
));
5525 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5526 verify_dominators (CDI_DOMINATORS
);
5532 /* Updates phi nodes after creating a forwarder block joined
5533 by edge FALLTHRU. */
5536 gimple_make_forwarder_block (edge fallthru
)
5540 basic_block dummy
, bb
;
5544 dummy
= fallthru
->src
;
5545 bb
= fallthru
->dest
;
5547 if (single_pred_p (bb
))
5550 /* If we redirected a branch we must create new PHI nodes at the
5552 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5554 gphi
*phi
, *new_phi
;
5557 var
= gimple_phi_result (phi
);
5558 new_phi
= create_phi_node (var
, bb
);
5559 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5560 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5564 /* Add the arguments we have stored on edges. */
5565 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5570 flush_pending_stmts (e
);
5575 /* Return a non-special label in the head of basic block BLOCK.
5576 Create one if it doesn't exist. */
5579 gimple_block_label (basic_block bb
)
5581 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5586 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5588 stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
5591 label
= gimple_label_label (stmt
);
5592 if (!DECL_NONLOCAL (label
))
5595 gsi_move_before (&i
, &s
);
5600 label
= create_artificial_label (UNKNOWN_LOCATION
);
5601 stmt
= gimple_build_label (label
);
5602 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5607 /* Attempt to perform edge redirection by replacing a possibly complex
5608 jump instruction by a goto or by removing the jump completely.
5609 This can apply only if all edges now point to the same block. The
5610 parameters and return values are equivalent to
5611 redirect_edge_and_branch. */
5614 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5616 basic_block src
= e
->src
;
5617 gimple_stmt_iterator i
;
5620 /* We can replace or remove a complex jump only when we have exactly
5622 if (EDGE_COUNT (src
->succs
) != 2
5623 /* Verify that all targets will be TARGET. Specifically, the
5624 edge that is not E must also go to TARGET. */
5625 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5628 i
= gsi_last_bb (src
);
5632 stmt
= gsi_stmt (i
);
5634 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5636 gsi_remove (&i
, true);
5637 e
= ssa_redirect_edge (e
, target
);
5638 e
->flags
= EDGE_FALLTHRU
;
5646 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5647 edge representing the redirected branch. */
5650 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5652 basic_block bb
= e
->src
;
5653 gimple_stmt_iterator gsi
;
5657 if (e
->flags
& EDGE_ABNORMAL
)
5660 if (e
->dest
== dest
)
5663 if (e
->flags
& EDGE_EH
)
5664 return redirect_eh_edge (e
, dest
);
5666 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5668 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5673 gsi
= gsi_last_bb (bb
);
5674 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5676 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5679 /* For COND_EXPR, we only need to redirect the edge. */
5683 /* No non-abnormal edges should lead from a non-simple goto, and
5684 simple ones should be represented implicitly. */
5689 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5690 tree label
= gimple_block_label (dest
);
5691 tree cases
= get_cases_for_edge (e
, switch_stmt
);
5693 /* If we have a list of cases associated with E, then use it
5694 as it's a lot faster than walking the entire case vector. */
5697 edge e2
= find_edge (e
->src
, dest
);
5704 CASE_LABEL (cases
) = label
;
5705 cases
= CASE_CHAIN (cases
);
5708 /* If there was already an edge in the CFG, then we need
5709 to move all the cases associated with E to E2. */
5712 tree cases2
= get_cases_for_edge (e2
, switch_stmt
);
5714 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5715 CASE_CHAIN (cases2
) = first
;
5717 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5721 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
5723 for (i
= 0; i
< n
; i
++)
5725 tree elt
= gimple_switch_label (switch_stmt
, i
);
5726 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5727 CASE_LABEL (elt
) = label
;
5735 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5736 int i
, n
= gimple_asm_nlabels (asm_stmt
);
5739 for (i
= 0; i
< n
; ++i
)
5741 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
5742 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5745 label
= gimple_block_label (dest
);
5746 TREE_VALUE (cons
) = label
;
5750 /* If we didn't find any label matching the former edge in the
5751 asm labels, we must be redirecting the fallthrough
5753 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5758 gsi_remove (&gsi
, true);
5759 e
->flags
|= EDGE_FALLTHRU
;
5762 case GIMPLE_OMP_RETURN
:
5763 case GIMPLE_OMP_CONTINUE
:
5764 case GIMPLE_OMP_SECTIONS_SWITCH
:
5765 case GIMPLE_OMP_FOR
:
5766 /* The edges from OMP constructs can be simply redirected. */
5769 case GIMPLE_EH_DISPATCH
:
5770 if (!(e
->flags
& EDGE_FALLTHRU
))
5771 redirect_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
), e
, dest
);
5774 case GIMPLE_TRANSACTION
:
5775 if (e
->flags
& EDGE_TM_ABORT
)
5776 gimple_transaction_set_label_over (as_a
<gtransaction
*> (stmt
),
5777 gimple_block_label (dest
));
5778 else if (e
->flags
& EDGE_TM_UNINSTRUMENTED
)
5779 gimple_transaction_set_label_uninst (as_a
<gtransaction
*> (stmt
),
5780 gimple_block_label (dest
));
5782 gimple_transaction_set_label_norm (as_a
<gtransaction
*> (stmt
),
5783 gimple_block_label (dest
));
5787 /* Otherwise it must be a fallthru edge, and we don't need to
5788 do anything besides redirecting it. */
5789 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5793 /* Update/insert PHI nodes as necessary. */
5795 /* Now update the edges in the CFG. */
5796 e
= ssa_redirect_edge (e
, dest
);
5801 /* Returns true if it is possible to remove edge E by redirecting
5802 it to the destination of the other edge from E->src. */
5805 gimple_can_remove_branch_p (const_edge e
)
5807 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
5813 /* Simple wrapper, as we can always redirect fallthru edges. */
5816 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5818 e
= gimple_redirect_edge_and_branch (e
, dest
);
5825 /* Splits basic block BB after statement STMT (but at least after the
5826 labels). If STMT is NULL, BB is split just after the labels. */
5829 gimple_split_block (basic_block bb
, void *stmt
)
5831 gimple_stmt_iterator gsi
;
5832 gimple_stmt_iterator gsi_tgt
;
5838 new_bb
= create_empty_bb (bb
);
5840 /* Redirect the outgoing edges. */
5841 new_bb
->succs
= bb
->succs
;
5843 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5846 /* Get a stmt iterator pointing to the first stmt to move. */
5847 if (!stmt
|| gimple_code ((gimple
*) stmt
) == GIMPLE_LABEL
)
5848 gsi
= gsi_after_labels (bb
);
5851 gsi
= gsi_for_stmt ((gimple
*) stmt
);
5855 /* Move everything from GSI to the new basic block. */
5856 if (gsi_end_p (gsi
))
5859 /* Split the statement list - avoid re-creating new containers as this
5860 brings ugly quadratic memory consumption in the inliner.
5861 (We are still quadratic since we need to update stmt BB pointers,
5863 gsi_split_seq_before (&gsi
, &list
);
5864 set_bb_seq (new_bb
, list
);
5865 for (gsi_tgt
= gsi_start (list
);
5866 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5867 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
5873 /* Moves basic block BB after block AFTER. */
5876 gimple_move_block_after (basic_block bb
, basic_block after
)
5878 if (bb
->prev_bb
== after
)
5882 link_block (bb
, after
);
5888 /* Return TRUE if block BB has no executable statements, otherwise return
5892 gimple_empty_block_p (basic_block bb
)
5894 /* BB must have no executable statements. */
5895 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5898 if (gsi_end_p (gsi
))
5900 if (is_gimple_debug (gsi_stmt (gsi
)))
5901 gsi_next_nondebug (&gsi
);
5902 return gsi_end_p (gsi
);
5906 /* Split a basic block if it ends with a conditional branch and if the
5907 other part of the block is not empty. */
5910 gimple_split_block_before_cond_jump (basic_block bb
)
5912 gimple
*last
, *split_point
;
5913 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5914 if (gsi_end_p (gsi
))
5916 last
= gsi_stmt (gsi
);
5917 if (gimple_code (last
) != GIMPLE_COND
5918 && gimple_code (last
) != GIMPLE_SWITCH
)
5921 split_point
= gsi_stmt (gsi
);
5922 return split_block (bb
, split_point
)->dest
;
5926 /* Return true if basic_block can be duplicated. */
5929 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
5934 /* Create a duplicate of the basic block BB. NOTE: This does not
5935 preserve SSA form. */
5938 gimple_duplicate_bb (basic_block bb
)
5941 gimple_stmt_iterator gsi_tgt
;
5943 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
5945 /* Copy the PHI nodes. We ignore PHI node arguments here because
5946 the incoming edges have not been setup yet. */
5947 for (gphi_iterator gpi
= gsi_start_phis (bb
);
5953 copy
= create_phi_node (NULL_TREE
, new_bb
);
5954 create_new_def_for (gimple_phi_result (phi
), copy
,
5955 gimple_phi_result_ptr (copy
));
5956 gimple_set_uid (copy
, gimple_uid (phi
));
5959 gsi_tgt
= gsi_start_bb (new_bb
);
5960 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
5964 def_operand_p def_p
;
5965 ssa_op_iter op_iter
;
5967 gimple
*stmt
, *copy
;
5969 stmt
= gsi_stmt (gsi
);
5970 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5973 /* Don't duplicate label debug stmts. */
5974 if (gimple_debug_bind_p (stmt
)
5975 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
5979 /* Create a new copy of STMT and duplicate STMT's virtual
5981 copy
= gimple_copy (stmt
);
5982 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
5984 maybe_duplicate_eh_stmt (copy
, stmt
);
5985 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5987 /* When copying around a stmt writing into a local non-user
5988 aggregate, make sure it won't share stack slot with other
5990 lhs
= gimple_get_lhs (stmt
);
5991 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
5993 tree base
= get_base_address (lhs
);
5995 && (TREE_CODE (base
) == VAR_DECL
5996 || TREE_CODE (base
) == RESULT_DECL
)
5997 && DECL_IGNORED_P (base
)
5998 && !TREE_STATIC (base
)
5999 && !DECL_EXTERNAL (base
)
6000 && (TREE_CODE (base
) != VAR_DECL
6001 || !DECL_HAS_VALUE_EXPR_P (base
)))
6002 DECL_NONSHAREABLE (base
) = 1;
6005 /* Create new names for all the definitions created by COPY and
6006 add replacement mappings for each new name. */
6007 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
6008 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
6014 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6017 add_phi_args_after_copy_edge (edge e_copy
)
6019 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
6022 gphi
*phi
, *phi_copy
;
6024 gphi_iterator psi
, psi_copy
;
6026 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
6029 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
6031 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
6032 dest
= get_bb_original (e_copy
->dest
);
6034 dest
= e_copy
->dest
;
6036 e
= find_edge (bb
, dest
);
6039 /* During loop unrolling the target of the latch edge is copied.
6040 In this case we are not looking for edge to dest, but to
6041 duplicated block whose original was dest. */
6042 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6044 if ((e
->dest
->flags
& BB_DUPLICATED
)
6045 && get_bb_original (e
->dest
) == dest
)
6049 gcc_assert (e
!= NULL
);
6052 for (psi
= gsi_start_phis (e
->dest
),
6053 psi_copy
= gsi_start_phis (e_copy
->dest
);
6055 gsi_next (&psi
), gsi_next (&psi_copy
))
6058 phi_copy
= psi_copy
.phi ();
6059 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
6060 add_phi_arg (phi_copy
, def
, e_copy
,
6061 gimple_phi_arg_location_from_edge (phi
, e
));
6066 /* Basic block BB_COPY was created by code duplication. Add phi node
6067 arguments for edges going out of BB_COPY. The blocks that were
6068 duplicated have BB_DUPLICATED set. */
6071 add_phi_args_after_copy_bb (basic_block bb_copy
)
6076 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
6078 add_phi_args_after_copy_edge (e_copy
);
6082 /* Blocks in REGION_COPY array of length N_REGION were created by
6083 duplication of basic blocks. Add phi node arguments for edges
6084 going from these blocks. If E_COPY is not NULL, also add
6085 phi node arguments for its destination.*/
6088 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
6093 for (i
= 0; i
< n_region
; i
++)
6094 region_copy
[i
]->flags
|= BB_DUPLICATED
;
6096 for (i
= 0; i
< n_region
; i
++)
6097 add_phi_args_after_copy_bb (region_copy
[i
]);
6099 add_phi_args_after_copy_edge (e_copy
);
6101 for (i
= 0; i
< n_region
; i
++)
6102 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
6105 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6106 important exit edge EXIT. By important we mean that no SSA name defined
6107 inside region is live over the other exit edges of the region. All entry
6108 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6109 to the duplicate of the region. Dominance and loop information is
6110 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6111 UPDATE_DOMINANCE is false then we assume that the caller will update the
6112 dominance information after calling this function. The new basic
6113 blocks are stored to REGION_COPY in the same order as they had in REGION,
6114 provided that REGION_COPY is not NULL.
6115 The function returns false if it is unable to copy the region,
6119 gimple_duplicate_sese_region (edge entry
, edge exit
,
6120 basic_block
*region
, unsigned n_region
,
6121 basic_block
*region_copy
,
6122 bool update_dominance
)
6125 bool free_region_copy
= false, copying_header
= false;
6126 struct loop
*loop
= entry
->dest
->loop_father
;
6128 vec
<basic_block
> doms
;
6130 int total_freq
= 0, entry_freq
= 0;
6131 gcov_type total_count
= 0, entry_count
= 0;
6133 if (!can_copy_bbs_p (region
, n_region
))
6136 /* Some sanity checking. Note that we do not check for all possible
6137 missuses of the functions. I.e. if you ask to copy something weird,
6138 it will work, but the state of structures probably will not be
6140 for (i
= 0; i
< n_region
; i
++)
6142 /* We do not handle subloops, i.e. all the blocks must belong to the
6144 if (region
[i
]->loop_father
!= loop
)
6147 if (region
[i
] != entry
->dest
6148 && region
[i
] == loop
->header
)
6152 /* In case the function is used for loop header copying (which is the primary
6153 use), ensure that EXIT and its copy will be new latch and entry edges. */
6154 if (loop
->header
== entry
->dest
)
6156 copying_header
= true;
6158 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
6161 for (i
= 0; i
< n_region
; i
++)
6162 if (region
[i
] != exit
->src
6163 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
6167 initialize_original_copy_tables ();
6170 set_loop_copy (loop
, loop_outer (loop
));
6172 set_loop_copy (loop
, loop
);
6176 region_copy
= XNEWVEC (basic_block
, n_region
);
6177 free_region_copy
= true;
6180 /* Record blocks outside the region that are dominated by something
6182 if (update_dominance
)
6185 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6188 if (entry
->dest
->count
)
6190 total_count
= entry
->dest
->count
;
6191 entry_count
= entry
->count
;
6192 /* Fix up corner cases, to avoid division by zero or creation of negative
6194 if (entry_count
> total_count
)
6195 entry_count
= total_count
;
6199 total_freq
= entry
->dest
->frequency
;
6200 entry_freq
= EDGE_FREQUENCY (entry
);
6201 /* Fix up corner cases, to avoid division by zero or creation of negative
6203 if (total_freq
== 0)
6205 else if (entry_freq
> total_freq
)
6206 entry_freq
= total_freq
;
6209 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
6210 split_edge_bb_loc (entry
), update_dominance
);
6213 scale_bbs_frequencies_gcov_type (region
, n_region
,
6214 total_count
- entry_count
,
6216 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
6221 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
6223 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
6228 loop
->header
= exit
->dest
;
6229 loop
->latch
= exit
->src
;
6232 /* Redirect the entry and add the phi node arguments. */
6233 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
6234 gcc_assert (redirected
!= NULL
);
6235 flush_pending_stmts (entry
);
6237 /* Concerning updating of dominators: We must recount dominators
6238 for entry block and its copy. Anything that is outside of the
6239 region, but was dominated by something inside needs recounting as
6241 if (update_dominance
)
6243 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6244 doms
.safe_push (get_bb_original (entry
->dest
));
6245 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6249 /* Add the other PHI node arguments. */
6250 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6252 if (free_region_copy
)
6255 free_original_copy_tables ();
6259 /* Checks if BB is part of the region defined by N_REGION BBS. */
6261 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6265 for (n
= 0; n
< n_region
; n
++)
6273 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6274 are stored to REGION_COPY in the same order in that they appear
6275 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6276 the region, EXIT an exit from it. The condition guarding EXIT
6277 is moved to ENTRY. Returns true if duplication succeeds, false
6303 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
6304 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
6305 basic_block
*region_copy ATTRIBUTE_UNUSED
)
6308 bool free_region_copy
= false;
6309 struct loop
*loop
= exit
->dest
->loop_father
;
6310 struct loop
*orig_loop
= entry
->dest
->loop_father
;
6311 basic_block switch_bb
, entry_bb
, nentry_bb
;
6312 vec
<basic_block
> doms
;
6313 int total_freq
= 0, exit_freq
= 0;
6314 gcov_type total_count
= 0, exit_count
= 0;
6315 edge exits
[2], nexits
[2], e
;
6316 gimple_stmt_iterator gsi
;
6319 basic_block exit_bb
;
6323 struct loop
*target
, *aloop
, *cloop
;
6325 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6327 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6329 if (!can_copy_bbs_p (region
, n_region
))
6332 initialize_original_copy_tables ();
6333 set_loop_copy (orig_loop
, loop
);
6336 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6338 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6340 cloop
= duplicate_loop (aloop
, target
);
6341 duplicate_subloops (aloop
, cloop
);
6347 region_copy
= XNEWVEC (basic_block
, n_region
);
6348 free_region_copy
= true;
6351 gcc_assert (!need_ssa_update_p (cfun
));
6353 /* Record blocks outside the region that are dominated by something
6355 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6357 if (exit
->src
->count
)
6359 total_count
= exit
->src
->count
;
6360 exit_count
= exit
->count
;
6361 /* Fix up corner cases, to avoid division by zero or creation of negative
6363 if (exit_count
> total_count
)
6364 exit_count
= total_count
;
6368 total_freq
= exit
->src
->frequency
;
6369 exit_freq
= EDGE_FREQUENCY (exit
);
6370 /* Fix up corner cases, to avoid division by zero or creation of negative
6372 if (total_freq
== 0)
6374 if (exit_freq
> total_freq
)
6375 exit_freq
= total_freq
;
6378 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6379 split_edge_bb_loc (exit
), true);
6382 scale_bbs_frequencies_gcov_type (region
, n_region
,
6383 total_count
- exit_count
,
6385 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
6390 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6392 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6395 /* Create the switch block, and put the exit condition to it. */
6396 entry_bb
= entry
->dest
;
6397 nentry_bb
= get_bb_copy (entry_bb
);
6398 if (!last_stmt (entry
->src
)
6399 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6400 switch_bb
= entry
->src
;
6402 switch_bb
= split_edge (entry
);
6403 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6405 gsi
= gsi_last_bb (switch_bb
);
6406 cond_stmt
= last_stmt (exit
->src
);
6407 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6408 cond_stmt
= gimple_copy (cond_stmt
);
6410 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6412 sorig
= single_succ_edge (switch_bb
);
6413 sorig
->flags
= exits
[1]->flags
;
6414 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6416 /* Register the new edge from SWITCH_BB in loop exit lists. */
6417 rescan_loop_exit (snew
, true, false);
6419 /* Add the PHI node arguments. */
6420 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6422 /* Get rid of now superfluous conditions and associated edges (and phi node
6424 exit_bb
= exit
->dest
;
6426 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6427 PENDING_STMT (e
) = NULL
;
6429 /* The latch of ORIG_LOOP was copied, and so was the backedge
6430 to the original header. We redirect this backedge to EXIT_BB. */
6431 for (i
= 0; i
< n_region
; i
++)
6432 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6434 gcc_assert (single_succ_edge (region_copy
[i
]));
6435 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6436 PENDING_STMT (e
) = NULL
;
6437 for (psi
= gsi_start_phis (exit_bb
);
6442 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6443 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6446 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6447 PENDING_STMT (e
) = NULL
;
6449 /* Anything that is outside of the region, but was dominated by something
6450 inside needs to update dominance info. */
6451 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6453 /* Update the SSA web. */
6454 update_ssa (TODO_update_ssa
);
6456 if (free_region_copy
)
6459 free_original_copy_tables ();
6463 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6464 adding blocks when the dominator traversal reaches EXIT. This
6465 function silently assumes that ENTRY strictly dominates EXIT. */
6468 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6469 vec
<basic_block
> *bbs_p
)
6473 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6475 son
= next_dom_son (CDI_DOMINATORS
, son
))
6477 bbs_p
->safe_push (son
);
6479 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6483 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6484 The duplicates are recorded in VARS_MAP. */
6487 replace_by_duplicate_decl (tree
*tp
, hash_map
<tree
, tree
> *vars_map
,
6490 tree t
= *tp
, new_t
;
6491 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6493 if (DECL_CONTEXT (t
) == to_context
)
6497 tree
&loc
= vars_map
->get_or_insert (t
, &existed
);
6503 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6504 add_local_decl (f
, new_t
);
6508 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6509 new_t
= copy_node (t
);
6511 DECL_CONTEXT (new_t
) = to_context
;
6522 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6523 VARS_MAP maps old ssa names and var_decls to the new ones. */
6526 replace_ssa_name (tree name
, hash_map
<tree
, tree
> *vars_map
,
6531 gcc_assert (!virtual_operand_p (name
));
6533 tree
*loc
= vars_map
->get (name
);
6537 tree decl
= SSA_NAME_VAR (name
);
6540 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name
));
6541 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6542 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6543 decl
, SSA_NAME_DEF_STMT (name
));
6546 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6547 name
, SSA_NAME_DEF_STMT (name
));
6549 /* Now that we've used the def stmt to define new_name, make sure it
6550 doesn't define name anymore. */
6551 SSA_NAME_DEF_STMT (name
) = NULL
;
6553 vars_map
->put (name
, new_name
);
6567 hash_map
<tree
, tree
> *vars_map
;
6568 htab_t new_label_map
;
6569 hash_map
<void *, void *> *eh_map
;
6573 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6574 contained in *TP if it has been ORIG_BLOCK previously and change the
6575 DECL_CONTEXT of every local variable referenced in *TP. */
6578 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6580 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6581 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6586 tree block
= TREE_BLOCK (t
);
6587 if (block
== p
->orig_block
6588 || (p
->orig_block
== NULL_TREE
6589 && block
!= NULL_TREE
))
6590 TREE_SET_BLOCK (t
, p
->new_block
);
6591 else if (flag_checking
&& block
!= NULL_TREE
)
6593 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6594 block
= BLOCK_SUPERCONTEXT (block
);
6595 gcc_assert (block
== p
->orig_block
);
6598 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6600 if (TREE_CODE (t
) == SSA_NAME
)
6601 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6602 else if (TREE_CODE (t
) == PARM_DECL
6603 && gimple_in_ssa_p (cfun
))
6604 *tp
= *(p
->vars_map
->get (t
));
6605 else if (TREE_CODE (t
) == LABEL_DECL
)
6607 if (p
->new_label_map
)
6609 struct tree_map in
, *out
;
6611 out
= (struct tree_map
*)
6612 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6617 DECL_CONTEXT (t
) = p
->to_context
;
6619 else if (p
->remap_decls_p
)
6621 /* Replace T with its duplicate. T should no longer appear in the
6622 parent function, so this looks wasteful; however, it may appear
6623 in referenced_vars, and more importantly, as virtual operands of
6624 statements, and in alias lists of other variables. It would be
6625 quite difficult to expunge it from all those places. ??? It might
6626 suffice to do this for addressable variables. */
6627 if ((TREE_CODE (t
) == VAR_DECL
6628 && !is_global_var (t
))
6629 || TREE_CODE (t
) == CONST_DECL
)
6630 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6634 else if (TYPE_P (t
))
6640 /* Helper for move_stmt_r. Given an EH region number for the source
6641 function, map that to the duplicate EH regio number in the dest. */
6644 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6646 eh_region old_r
, new_r
;
6648 old_r
= get_eh_region_from_number (old_nr
);
6649 new_r
= static_cast<eh_region
> (*p
->eh_map
->get (old_r
));
6651 return new_r
->index
;
6654 /* Similar, but operate on INTEGER_CSTs. */
6657 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6661 old_nr
= tree_to_shwi (old_t_nr
);
6662 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6664 return build_int_cst (integer_type_node
, new_nr
);
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  /* Retarget the statement's lexical block when it belonged to the
     region being moved (or to no block at all, when moving the whole
     function body).  */
  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      /* Argument 1 carries a second EH region number; remap it
		 before falling through to handle argument 0.  */
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;

    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      /* move_sese_region_to_fn stashed the destination loop in ->aux.  */
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by 25% headroom, like the other block-array growers.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&psi, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Retarget argument locations that referenced the moved block.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      locus = set_block (locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Transfer the label->block mapping to the destination CFG,
	     growing its map if needed.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Move the statement's EH landing-pad association and its
	 value-profile histograms into the destination function.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Outgoing goto locations that referenced the moved block must be
     retargeted to the destination function's block too.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = set_block (e->goto_locus, d->new_block);
      }
}
/* Examine the statements in BB (which is in SRC_CFUN); find and return
   the outermost EH region.  Use REGION as the incoming base EH region.  */

static eh_region
find_outermost_region_in_block (struct function *src_cfun,
				basic_block bb, eh_region region)
{
  gimple_stmt_iterator si;

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      eh_region stmt_region;
      int lp_nr;

      /* Map the statement to its EH region via its landing-pad number.  */
      lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
      stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
      if (stmt_region)
	{
	  if (region == NULL)
	    region = stmt_region;
	  else if (stmt_region != region)
	    {
	      /* Widen to the common enclosing region.  */
	      region = eh_region_outermost (src_cfun, stmt_region, region);
	      gcc_assert (region != NULL);
	    }
	}
    }

  return region;
}
/* Called by duplicate_eh_regions for each label in the region being
   duplicated: create a fresh artificial label for DECL in the current
   (destination) function and record the old->new pair in the tree_map
   hash table passed in DATA.  Returns the new label.  */

static void *
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = XNEW (struct tree_map);
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label (UNKNOWN_LOCATION);
  /* The duplicate keeps the original's LABEL_DECL_UID; make sure the
     destination CFG's uid watermark covers it.  */
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
/* Tree walker to replace the decls used inside value expressions by
   duplicates.  */

static tree
replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct replace_decls_d *rd = (struct replace_decls_d *)data;

  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* Substitute the decl's duplicate (from the vars map) in place.  */
      replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
      break;
    default:
      break;
    }

  /* Do not descend into types or into decls' own trees.  */
  if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = false;

  return NULL;
}
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  */

static void
replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
				  tree to_context)
{
  tree *tp, t;

  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  /* The decl was replaced by a duplicate: migrate any value
	     expression to the duplicate, remapping the decls it uses.  */
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      tree x = DECL_VALUE_EXPR (*tp);
	      struct replace_decls_d rd = { vars_map, to_context };
	      SET_DECL_VALUE_EXPR (t, x);
	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
	      SET_DECL_VALUE_EXPR (t, x);
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain in place of the old decl.  */
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  /* Recurse into subblocks.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
/* Fixup the loop arrays and numbers after moving LOOP and its subloops
   from function FN1 to function FN2.  */

static void
fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
			      struct loop *loop)
{
  /* Discard it from the old loop array.  */
  (*get_loops (fn1))[loop->num] = NULL;

  /* Place it in the new loop array, assigning it a new number.  */
  loop->num = number_of_loops (fn2);
  vec_safe_push (loops_for_fn (fn2)->larray, loop);

  /* Recurse to children.  */
  for (loop = loop->inner; loop; loop = loop->next)
    fixup_loop_arrays_after_move (fn1, fn2, loop);
}
/* Verify that the blocks in BBS_P are a single-entry, single-exit region
   delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks.  */

static void
verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
{
  basic_block bb;
  edge_iterator ei;
  edge e;
  unsigned i;
  bitmap bbs = BITMAP_ALLOC (NULL);

  gcc_assert (entry != NULL);
  gcc_assert (entry != exit);
  gcc_assert (bbs_p != NULL);

  gcc_assert (bbs_p->length () > 0);

  /* Build a bitmap of region members for O(1) membership tests.  */
  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    bitmap_set_bit (bbs, bb->index);

  gcc_assert (bitmap_bit_p (bbs, entry->index));
  gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    {
      if (bb == entry)
	{
	  /* The region's only entry comes from outside the region.  */
	  gcc_assert (single_pred_p (entry));
	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
	}
      else
	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
	  }

      if (bb == exit)
	{
	  /* The region's only exit goes outside the region.  */
	  gcc_assert (single_succ_p (exit));
	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
	}
      else
	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
	  }
    }

  BITMAP_FREE (bbs);
}
/* If FROM is an SSA_NAME, mark the version in bitmap DATA.  */

bool
gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
{
  bitmap release_names = (bitmap)data;

  if (TREE_CODE (from) != SSA_NAME)
    return true;

  bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
  /* Always return true so the hash-map traversal continues.  */
  return true;
}
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   function's outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.

   TODO: investigate whether we can reuse gimple_duplicate_sese_region to
   reimplement move_sese_region_to_fn by duplicating the region rather than
   moving it.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
			basic_block exit_bb, tree orig_block)
{
  vec<basic_block> bbs, dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag;
  unsigned *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges, num_nodes;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  hash_map<void *, void *> *eh_map;
  struct loop *loop = entry_bb->loop_father;
  struct loop *loop0 = get_loop (saved_cfun, 0);
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
	      && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs.create (0);
  bbs.safe_push (entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  if (flag_checking)
    verify_sese (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     bbs.address (),
				     bbs.length ());

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = XNEWVEC (basic_block, num_entry_edges);
  entry_flag = XNEWVEC (int, num_entry_edges);
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
  i = 0;
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = XNEWVEC (basic_block, num_exit_edges);
      exit_flag = XNEWVEC (int, num_exit_edges);
      exit_prob = XNEWVEC (unsigned, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_map = NULL;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      eh_region region = NULL;

      FOR_EACH_VEC_ELT (bbs, i, bb)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != NULL)
	{
	  /* Duplicate the outermost region (and everything inside it)
	     into the child, remembering old->new label pairs so labels
	     in EH constructs can be remapped.  */
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
					 new_label_mapper, new_label_map);
	}
    }

  /* Initialize an empty loop tree.  */
  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (dest_cfun, loops, 1);
  loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
  set_loops_for_fn (dest_cfun, loops);

  /* Move the outlined loop tree part.  */
  num_nodes = bbs.length ();
  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      if (bb->loop_father->header == bb)
	{
	  struct loop *this_loop = bb->loop_father;
	  struct loop *outer = loop_outer (this_loop);
	  if (outer == loop
	      /* If the SESE region contains some bbs ending with
		 a noreturn call, those are considered to belong
		 to the outermost loop in saved_cfun, rather than
		 the entry_bb's loop_father.  */
	      || outer == loop0)
	    {
	      if (outer != loop)
		num_nodes -= this_loop->num_nodes;
	      flow_loop_tree_node_remove (bb->loop_father);
	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
	    }
	}
      else if (bb->loop_father == loop0 && loop0 != loop)
	num_nodes--;

      /* Remove loop exits from the outlined region.  */
      if (loops_for_fn (saved_cfun)->exits)
	FOR_EACH_EDGE (e, ei, bb->succs)
	  {
	    struct loops *l = loops_for_fn (saved_cfun);
	    loop_exit **slot
	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
					       NO_INSERT);
	    if (slot)
	      l->exits->clear_slot (slot);
	  }
    }

  /* Adjust the number of blocks in the tree root of the outlined part.  */
  get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;

  /* Setup a mapping to be used by move_block_to_fn.  */
  loop->aux = current_loops->tree_root;
  loop0->aux = current_loops->tree_root;

  pop_cfun ();

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (bbs.length () >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  hash_map<tree, tree> vars_map;

  memset (&d, 0, sizeof (d));
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.vars_map = &vars_map;
  d.new_label_map = new_label_map;
  d.eh_map = eh_map;
  d.remap_decls_p = true;

  if (gimple_in_ssa_p (cfun))
    for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
      {
	tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
	set_ssa_default_def (dest_cfun, arg, narg);
	vars_map.put (arg, narg);
      }

  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
      after = bb;
    }

  loop->aux = NULL;
  loop0->aux = NULL;
  /* Loop sizes are no longer correct, fix them up.  */
  loop->num_nodes -= num_nodes;
  for (struct loop *outer = loop_outer (loop);
       outer; outer = loop_outer (outer))
    outer->num_nodes -= num_nodes;
  loop0->num_nodes -= bbs.length () - num_nodes;

  if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
    {
      struct loop *aloop;
      for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
	if (aloop != NULL)
	  {
	    if (aloop->simduid)
	      {
		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
					   d.to_context);
		dest_cfun->has_simduid_loops = true;
	      }
	    if (aloop->force_vectorize)
	      dest_cfun->has_force_vectorize_loops = true;
	  }
    }

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    {
      tree block;
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		  == NULL_TREE);
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
	= BLOCK_SUBBLOCKS (orig_block);
      for (block = BLOCK_SUBBLOCKS (orig_block);
	   block; block = BLOCK_CHAIN (block))
	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
    }

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    &vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  if (eh_map)
    delete eh_map;

  if (gimple_in_ssa_p (cfun))
    {
      /* We need to release ssa-names in a defined order, so first find them,
	 and then iterate in ascending version order.  */
      bitmap release_names = BITMAP_ALLOC (NULL);
      vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
      bitmap_iterator bi;
      EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
	release_ssa_name (ssa_name (i));
      BITMAP_FREE (release_names);
    }

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  FOR_EACH_VEC_ELT (dom_bbs, i, abb)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  dom_bbs.release ();

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  bbs.release ();

  return bb;
}
/* Dump default def DEF to file FILE using FLAGS and indentation
   SPC.  */

static void
dump_default_def (FILE *file, tree def, int spc, int flags)
{
  for (int i = 0; i < spc; ++i)
    fprintf (file, " ");
  dump_ssaname_info_to_file (file, def, spc);

  /* Print "TYPE NAME = VAR;" for the SSA default definition.  */
  print_generic_expr (file, TREE_TYPE (def), flags);
  fprintf (file, " ");
  print_generic_expr (file, def, flags);
  fprintf (file, " = ");
  print_generic_expr (file, SSA_NAME_VAR (def), flags);
  fprintf (file, ";\n");
}
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
   */

void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Print any attributes as an __attribute__((...)) prefix.  */
  if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
    {
      fprintf (file, "__attribute__((");

      bool first = true;
      tree chain;
      for (chain = DECL_ATTRIBUTES (fndecl); chain;
	   first = false, chain = TREE_CHAIN (chain))
	{
	  if (!first)
	    fprintf (file, ", ");

	  print_generic_expr (file, get_attribute_name (chain), dump_flags);
	  if (TREE_VALUE (chain) != NULL_TREE)
	    {
	      fprintf (file, " (");
	      print_generic_expr (file, TREE_VALUE (chain), dump_flags);
	      fprintf (file, ")");
	    }
	}

      fprintf (file, "))\n");
    }

  current_function_decl = fndecl;
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the argument list.  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* A raw dump with no GIMPLE body is just the tree node itself.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (gimple_in_ssa_p (fun)
	  && (flags & TDF_ALIAS))
	{
	  /* Dump the default definitions of arguments, the by-reference
	     result, and the static chain, when present.  */
	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
	       arg = DECL_CHAIN (arg))
	    {
	      tree def = ssa_default_def (fun, arg);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree res = DECL_RESULT (fun->decl);
	  if (res != NULL_TREE
	      && DECL_BY_REFERENCE (res))
	    {
	      tree def = ssa_default_def (fun, res);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree static_chain = fun->static_chain_decl;
	  if (static_chain != NULL_TREE)
	    {
	      tree def = ssa_default_def (fun, static_chain);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }
	}

      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }

      /* Dump anonymous SSA names (those with no underlying decl).  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    {
	      fprintf (file, "{\n");
	      /* No topmost bind, pretend it's ignored for later.  */
	      ignore_topmost_bind = true;
	    }
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h)  */

DEBUG_FUNCTION void
debug_function (tree fn, int flags)
{
  dump_function_to_file (fn, stderr, flags);
}
/* Print on FILE the indexes for the predecessors of basic_block BB.  */

static void
print_pred_bbs (FILE *file, basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    fprintf (file, "bb_%d ", e->src->index);
}
/* Print on FILE the indexes for the successors of basic_block BB.  */

static void
print_succ_bbs (FILE *file, basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    fprintf (file, "bb_%d ", e->dest->index);
}
/* Print to FILE the basic block BB following the VERBOSITY level.  */

void
print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
{
  char *s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print basic_block's header.  */
  if (verbosity >= 2)
    {
      fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
      print_pred_bbs (file, bb);
      fprintf (file, "}, succs = {");
      print_succ_bbs (file, bb);
      fprintf (file, "})\n");
    }

  /* Print basic_block's body.  */
  if (verbosity >= 3)
    {
      fprintf (file, "%s  {\n", s_indent);
      dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
      fprintf (file, "%s  }\n", s_indent);
    }
}
7744 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  */

static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      /* A loop without a header has been removed from the loop tree.  */
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations, 0);

  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      print_decu (loop->nb_iterations_upper_bound, file);
    }
  if (loop->any_likely_upper_bound)
    {
      fprintf (file, ", likely_upper_bound = ");
      print_decu (loop->nb_iterations_likely_upper_bound, file);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      print_decu (loop->nb_iterations_estimate, file);
    }
  fprintf (file, ")\n");

  /* Print loop's body.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      FOR_EACH_BB_FN (bb, cfun)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
/* Print the LOOP and its sibling loops on FILE, indented INDENT
   spaces.  Following VERBOSITY level this outputs the contents of the
   loop, or just its structure.  */

static void
print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
			 int verbosity)
{
  if (loop == NULL)
    return;

  print_loop (file, loop, indent, verbosity);
  /* Recurse along the sibling chain.  */
  print_loop_and_siblings (file, loop->next, indent, verbosity);
}
/* Follow a CFG edge from the entry point of the program, and on entry
   of a loop, pretty print the loop structure on FILE.  */

void
print_loops (FILE *file, int verbosity)
{
  basic_block bb;

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  fprintf (file, "\nLoops in function: %s\n", current_function_name ());
  if (bb && bb->loop_father)
    print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
}
/* Dump a loop to stderr with minimal verbosity (structure only).  */

DEBUG_FUNCTION void
debug (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
/* Pointer overload: forward to the reference overload, or print
   "<nil>" for a null pointer.  */

DEBUG_FUNCTION void
debug (struct loop *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Dump a loop verbosely.  */

DEBUG_FUNCTION void
debug_verbose (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
/* Pointer overload of the verbose loop dumper; prints "<nil>" for a
   null pointer.  */

DEBUG_FUNCTION void
debug_verbose (struct loop *ptr)
{
  if (ptr)
    debug_verbose (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Debugging loops structure at tree level, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (cfun, num), verbosity);
}
/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false,
   otherwise.  */

static bool
gimple_block_ends_with_call_p (basic_block bb)
{
  /* Debug statements are skipped so a trailing call is still found.  */
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
}
/* Return true if BB ends with a conditional branch.  Return false,
   otherwise.  */

static bool
gimple_block_ends_with_condjump_p (const_basic_block bb)
{
  gimple *stmt = last_stmt (CONST_CAST_BB (bb));
  return (stmt && gimple_code (stmt) == GIMPLE_COND);
}
/* Return true if we need to add fake edge to exit at statement T.
   Helper function for gimple_flow_call_edges_add.  */

static bool
need_fake_edge_p (gimple *t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      if (!(call_flags & ECF_NORETURN))
	return true;

      /* A noreturn call needs a fake edge only when it has a
	 non-fake successor (e.g. an EH or abnormal edge).  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Volatile or input asms can transfer control in ways the CFG does
     not model; give them a fake edge too.  */
  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
7980 /* Add fake edges to the function exit for any non constant and non
7981 noreturn calls (or noreturn calls with EH/abnormal edges),
7982 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7983 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7986 The goal is to expose cases in which entering a basic block does
7987 not imply that all subsequent instructions must be executed. */
7990 gimple_flow_call_edges_add (sbitmap blocks
)
7993 int blocks_split
= 0;
7994 int last_bb
= last_basic_block_for_fn (cfun
);
7995 bool check_last_block
= false;
7997 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
8001 check_last_block
= true;
8003 check_last_block
= bitmap_bit_p (blocks
,
8004 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
8006 /* In the last basic block, before epilogue generation, there will be
8007 a fallthru edge to EXIT. Special care is required if the last insn
8008 of the last basic block is a call because make_edge folds duplicate
8009 edges, which would result in the fallthru edge also being marked
8010 fake, which would result in the fallthru edge being removed by
8011 remove_fake_edges, which would result in an invalid CFG.
8013 Moreover, we can't elide the outgoing fake edge, since the block
8014 profiler needs to take this into account in order to solve the minimal
8015 spanning tree in the case that the call doesn't return.
8017 Handle this by adding a dummy instruction in a new last basic block. */
8018 if (check_last_block
)
8020 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
8021 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
8024 if (!gsi_end_p (gsi
))
8027 if (t
&& need_fake_edge_p (t
))
8031 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8034 gsi_insert_on_edge (e
, gimple_build_nop ());
8035 gsi_commit_edge_inserts ();
8040 /* Now add fake edges to the function exit for any non constant
8041 calls since there is no way that we can determine if they will
8043 for (i
= 0; i
< last_bb
; i
++)
8045 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8046 gimple_stmt_iterator gsi
;
8047 gimple
*stmt
, *last_stmt
;
8052 if (blocks
&& !bitmap_bit_p (blocks
, i
))
8055 gsi
= gsi_last_nondebug_bb (bb
);
8056 if (!gsi_end_p (gsi
))
8058 last_stmt
= gsi_stmt (gsi
);
8061 stmt
= gsi_stmt (gsi
);
8062 if (need_fake_edge_p (stmt
))
8066 /* The handling above of the final block before the
8067 epilogue should be enough to verify that there is
8068 no edge to the exit block in CFG already.
8069 Calling make_edge in such case would cause us to
8070 mark that edge as fake and remove it later. */
8071 if (flag_checking
&& stmt
== last_stmt
)
8073 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8074 gcc_assert (e
== NULL
);
8077 /* Note that the following may create a new basic block
8078 and renumber the existing basic blocks. */
8079 if (stmt
!= last_stmt
)
8081 e
= split_block (bb
, stmt
);
8085 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
8089 while (!gsi_end_p (gsi
));
8094 verify_flow_info ();
8096 return blocks_split
;
8099 /* Removes edge E and all the blocks dominated by it, and updates dominance
8100 information. The IL in E->src needs to be updated separately.
8101 If dominance info is not available, only the edge E is removed.*/
8104 remove_edge_and_dominated_blocks (edge e
)
8106 vec
<basic_block
> bbs_to_remove
= vNULL
;
8107 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
8111 bool none_removed
= false;
8113 basic_block bb
, dbb
;
8116 /* If we are removing a path inside a non-root loop that may change
8117 loop ownership of blocks or remove loops. Mark loops for fixup. */
8119 && loop_outer (e
->src
->loop_father
) != NULL
8120 && e
->src
->loop_father
== e
->dest
->loop_father
)
8121 loops_state_set (LOOPS_NEED_FIXUP
);
8123 if (!dom_info_available_p (CDI_DOMINATORS
))
8129 /* No updating is needed for edges to exit. */
8130 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8132 if (cfgcleanup_altered_bbs
)
8133 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
8138 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8139 that is not dominated by E->dest, then this set is empty. Otherwise,
8140 all the basic blocks dominated by E->dest are removed.
8142 Also, to DF_IDOM we store the immediate dominators of the blocks in
8143 the dominance frontier of E (i.e., of the successors of the
8144 removed blocks, if there are any, and of E->dest otherwise). */
8145 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
8150 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
8152 none_removed
= true;
8157 df
= BITMAP_ALLOC (NULL
);
8158 df_idom
= BITMAP_ALLOC (NULL
);
8161 bitmap_set_bit (df_idom
,
8162 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
8165 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
8166 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
8168 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
8170 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
8171 bitmap_set_bit (df
, f
->dest
->index
);
8174 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
8175 bitmap_clear_bit (df
, bb
->index
);
8177 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
8179 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8180 bitmap_set_bit (df_idom
,
8181 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
8185 if (cfgcleanup_altered_bbs
)
8187 /* Record the set of the altered basic blocks. */
8188 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
8189 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
8192 /* Remove E and the cancelled blocks. */
8197 /* Walk backwards so as to get a chance to substitute all
8198 released DEFs into debug stmts. See
8199 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8201 for (i
= bbs_to_remove
.length (); i
-- > 0; )
8202 delete_basic_block (bbs_to_remove
[i
]);
8205 /* Update the dominance information. The immediate dominator may change only
8206 for blocks whose immediate dominator belongs to DF_IDOM:
8208 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8209 removal. Let Z the arbitrary block such that idom(Z) = Y and
8210 Z dominates X after the removal. Before removal, there exists a path P
8211 from Y to X that avoids Z. Let F be the last edge on P that is
8212 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8213 dominates W, and because of P, Z does not dominate W), and W belongs to
8214 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8215 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
8217 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8218 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
8220 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
8221 bbs_to_fix_dom
.safe_push (dbb
);
8224 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
8227 BITMAP_FREE (df_idom
);
8228 bbs_to_remove
.release ();
8229 bbs_to_fix_dom
.release ();
8232 /* Purge dead EH edges from basic block BB. */
8235 gimple_purge_dead_eh_edges (basic_block bb
)
8237 bool changed
= false;
8240 gimple
*stmt
= last_stmt (bb
);
8242 if (stmt
&& stmt_can_throw_internal (stmt
))
8245 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8247 if (e
->flags
& EDGE_EH
)
8249 remove_edge_and_dominated_blocks (e
);
8259 /* Purge dead EH edges from basic block listed in BLOCKS. */
8262 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
8264 bool changed
= false;
8268 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8270 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8272 /* Earlier gimple_purge_dead_eh_edges could have removed
8273 this basic block already. */
8274 gcc_assert (bb
|| changed
);
8276 changed
|= gimple_purge_dead_eh_edges (bb
);
8282 /* Purge dead abnormal call edges from basic block BB. */
8285 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
8287 bool changed
= false;
8290 gimple
*stmt
= last_stmt (bb
);
8292 if (!cfun
->has_nonlocal_label
8293 && !cfun
->calls_setjmp
)
8296 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
8299 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8301 if (e
->flags
& EDGE_ABNORMAL
)
8303 if (e
->flags
& EDGE_FALLTHRU
)
8304 e
->flags
&= ~EDGE_ABNORMAL
;
8306 remove_edge_and_dominated_blocks (e
);
8316 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8319 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
8321 bool changed
= false;
8325 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8327 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8329 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8330 this basic block already. */
8331 gcc_assert (bb
|| changed
);
8333 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
8339 /* This function is called whenever a new edge is created or
8343 gimple_execute_on_growing_pred (edge e
)
8345 basic_block bb
= e
->dest
;
8347 if (!gimple_seq_empty_p (phi_nodes (bb
)))
8348 reserve_phi_args_for_new_edge (bb
);
8351 /* This function is called immediately before edge E is removed from
8352 the edge vector E->dest->preds. */
8355 gimple_execute_on_shrinking_pred (edge e
)
8357 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
8358 remove_phi_args (e
);
8361 /*---------------------------------------------------------------------------
8362 Helper functions for Loop versioning
8363 ---------------------------------------------------------------------------*/
8365 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8366 of 'first'. Both of them are dominated by 'new_head' basic block. When
8367 'new_head' was created by 'second's incoming edge it received phi arguments
8368 on the edge by split_edge(). Later, additional edge 'e' was created to
8369 connect 'new_head' and 'first'. Now this routine adds phi args on this
8370 additional edge 'e' that new_head to second edge received as part of edge
8374 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
8375 basic_block new_head
, edge e
)
8378 gphi_iterator psi1
, psi2
;
8380 edge e2
= find_edge (new_head
, second
);
8382 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8383 edge, we should always have an edge from NEW_HEAD to SECOND. */
8384 gcc_assert (e2
!= NULL
);
8386 /* Browse all 'second' basic block phi nodes and add phi args to
8387 edge 'e' for 'first' head. PHI args are always in correct order. */
8389 for (psi2
= gsi_start_phis (second
),
8390 psi1
= gsi_start_phis (first
);
8391 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
8392 gsi_next (&psi2
), gsi_next (&psi1
))
8396 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
8397 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
8402 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8403 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8404 the destination of the ELSE part. */
8407 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
8408 basic_block second_head ATTRIBUTE_UNUSED
,
8409 basic_block cond_bb
, void *cond_e
)
8411 gimple_stmt_iterator gsi
;
8412 gimple
*new_cond_expr
;
8413 tree cond_expr
= (tree
) cond_e
;
8416 /* Build new conditional expr */
8417 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
8418 NULL_TREE
, NULL_TREE
);
8420 /* Add new cond in cond_bb. */
8421 gsi
= gsi_last_bb (cond_bb
);
8422 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
8424 /* Adjust edges appropriately to connect new head with first head
8425 as well as second head. */
8426 e0
= single_succ_edge (cond_bb
);
8427 e0
->flags
&= ~EDGE_FALLTHRU
;
8428 e0
->flags
|= EDGE_FALSE_VALUE
;
8432 /* Do book-keeping of basic block BB for the profile consistency checker.
8433 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8434 then do post-pass accounting. Store the counting in RECORD. */
8436 gimple_account_profile_record (basic_block bb
, int after_pass
,
8437 struct profile_record
*record
)
8439 gimple_stmt_iterator i
;
8440 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
8442 record
->size
[after_pass
]
8443 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
8444 if (profile_status_for_fn (cfun
) == PROFILE_READ
)
8445 record
->time
[after_pass
]
8446 += estimate_num_insns (gsi_stmt (i
),
8447 &eni_time_weights
) * bb
->count
;
8448 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
8449 record
->time
[after_pass
]
8450 += estimate_num_insns (gsi_stmt (i
),
8451 &eni_time_weights
) * bb
->frequency
;
8455 struct cfg_hooks gimple_cfg_hooks
= {
8457 gimple_verify_flow_info
,
8458 gimple_dump_bb
, /* dump_bb */
8459 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
8460 create_bb
, /* create_basic_block */
8461 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
8462 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
8463 gimple_can_remove_branch_p
, /* can_remove_branch_p */
8464 remove_bb
, /* delete_basic_block */
8465 gimple_split_block
, /* split_block */
8466 gimple_move_block_after
, /* move_block_after */
8467 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
8468 gimple_merge_blocks
, /* merge_blocks */
8469 gimple_predict_edge
, /* predict_edge */
8470 gimple_predicted_by_p
, /* predicted_by_p */
8471 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
8472 gimple_duplicate_bb
, /* duplicate_block */
8473 gimple_split_edge
, /* split_edge */
8474 gimple_make_forwarder_block
, /* make_forward_block */
8475 NULL
, /* tidy_fallthru_edge */
8476 NULL
, /* force_nonfallthru */
8477 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
8478 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
8479 gimple_flow_call_edges_add
, /* flow_call_edges_add */
8480 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
8481 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
8482 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
8483 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
8484 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
8485 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
8486 flush_pending_stmts
, /* flush_pending_stmts */
8487 gimple_empty_block_p
, /* block_empty_p */
8488 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
8489 gimple_account_profile_record
,
8493 /* Split all critical edges. */
8496 split_critical_edges (void)
8502 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8503 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8504 mappings around the calls to split_edge. */
8505 start_recording_case_labels ();
8506 FOR_ALL_BB_FN (bb
, cfun
)
8508 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8510 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
8512 /* PRE inserts statements to edges and expects that
8513 since split_critical_edges was done beforehand, committing edge
8514 insertions will not split more edges. In addition to critical
8515 edges we must split edges that have multiple successors and
8516 end by control flow statements, such as RESX.
8517 Go ahead and split them too. This matches the logic in
8518 gimple_find_edge_insert_loc. */
8519 else if ((!single_pred_p (e
->dest
)
8520 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
8521 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8522 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
8523 && !(e
->flags
& EDGE_ABNORMAL
))
8525 gimple_stmt_iterator gsi
;
8527 gsi
= gsi_last_bb (e
->src
);
8528 if (!gsi_end_p (gsi
)
8529 && stmt_ends_bb_p (gsi_stmt (gsi
))
8530 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
8531 && !gimple_call_builtin_p (gsi_stmt (gsi
),
8537 end_recording_case_labels ();
8543 const pass_data pass_data_split_crit_edges
=
8545 GIMPLE_PASS
, /* type */
8546 "crited", /* name */
8547 OPTGROUP_NONE
, /* optinfo_flags */
8548 TV_TREE_SPLIT_EDGES
, /* tv_id */
8549 PROP_cfg
, /* properties_required */
8550 PROP_no_crit_edges
, /* properties_provided */
8551 0, /* properties_destroyed */
8552 0, /* todo_flags_start */
8553 0, /* todo_flags_finish */
8556 class pass_split_crit_edges
: public gimple_opt_pass
8559 pass_split_crit_edges (gcc::context
*ctxt
)
8560 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
8563 /* opt_pass methods: */
8564 virtual unsigned int execute (function
*) { return split_critical_edges (); }
8566 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8567 }; // class pass_split_crit_edges
8572 make_pass_split_crit_edges (gcc::context
*ctxt
)
8574 return new pass_split_crit_edges (ctxt
);
8578 /* Insert COND expression which is GIMPLE_COND after STMT
8579 in basic block BB with appropriate basic block split
8580 and creation of a new conditionally executed basic block.
8581 Return created basic block. */
8583 insert_cond_bb (basic_block bb
, gimple
*stmt
, gimple
*cond
)
8585 edge fall
= split_block (bb
, stmt
);
8586 gimple_stmt_iterator iter
= gsi_last_bb (bb
);
8589 /* Insert cond statement. */
8590 gcc_assert (gimple_code (cond
) == GIMPLE_COND
);
8591 if (gsi_end_p (iter
))
8592 gsi_insert_before (&iter
, cond
, GSI_CONTINUE_LINKING
);
8594 gsi_insert_after (&iter
, cond
, GSI_CONTINUE_LINKING
);
8596 /* Create conditionally executed block. */
8597 new_bb
= create_empty_bb (bb
);
8598 make_edge (bb
, new_bb
, EDGE_TRUE_VALUE
);
8599 make_single_succ_edge (new_bb
, fall
->dest
, EDGE_FALLTHRU
);
8601 /* Fix edge for split bb. */
8602 fall
->flags
= EDGE_FALSE_VALUE
;
8604 /* Update dominance info. */
8605 if (dom_info_available_p (CDI_DOMINATORS
))
8607 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, bb
);
8608 set_immediate_dominator (CDI_DOMINATORS
, fall
->dest
, bb
);
8611 /* Update loop info. */
8613 add_bb_to_loop (new_bb
, bb
->loop_father
);
8618 /* Build a ternary operation and gimplify it. Emit code before GSI.
8619 Return the gimple_val holding the result. */
8622 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8623 tree type
, tree a
, tree b
, tree c
)
8626 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8628 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8631 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8635 /* Build a binary operation and gimplify it. Emit code before GSI.
8636 Return the gimple_val holding the result. */
8639 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8640 tree type
, tree a
, tree b
)
8644 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8647 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8651 /* Build a unary operation and gimplify it. Emit code before GSI.
8652 Return the gimple_val holding the result. */
8655 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8660 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8663 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8669 /* Given a basic block B which ends with a conditional and has
8670 precisely two successors, determine which of the edges is taken if
8671 the conditional is true and which is taken if the conditional is
8672 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8675 extract_true_false_edges_from_block (basic_block b
,
8679 edge e
= EDGE_SUCC (b
, 0);
8681 if (e
->flags
& EDGE_TRUE_VALUE
)
8684 *false_edge
= EDGE_SUCC (b
, 1);
8689 *true_edge
= EDGE_SUCC (b
, 1);
8694 /* From a controlling predicate in the immediate dominator DOM of
8695 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8696 predicate evaluates to true and false and store them to
8697 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8698 they are non-NULL. Returns true if the edges can be determined,
8699 else return false. */
8702 extract_true_false_controlled_edges (basic_block dom
, basic_block phiblock
,
8703 edge
*true_controlled_edge
,
8704 edge
*false_controlled_edge
)
8706 basic_block bb
= phiblock
;
8707 edge true_edge
, false_edge
, tem
;
8708 edge e0
= NULL
, e1
= NULL
;
8710 /* We have to verify that one edge into the PHI node is dominated
8711 by the true edge of the predicate block and the other edge
8712 dominated by the false edge. This ensures that the PHI argument
8713 we are going to take is completely determined by the path we
8714 take from the predicate block.
8715 We can only use BB dominance checks below if the destination of
8716 the true/false edges are dominated by their edge, thus only
8717 have a single predecessor. */
8718 extract_true_false_edges_from_block (dom
, &true_edge
, &false_edge
);
8719 tem
= EDGE_PRED (bb
, 0);
8720 if (tem
== true_edge
8721 || (single_pred_p (true_edge
->dest
)
8722 && (tem
->src
== true_edge
->dest
8723 || dominated_by_p (CDI_DOMINATORS
,
8724 tem
->src
, true_edge
->dest
))))
8726 else if (tem
== false_edge
8727 || (single_pred_p (false_edge
->dest
)
8728 && (tem
->src
== false_edge
->dest
8729 || dominated_by_p (CDI_DOMINATORS
,
8730 tem
->src
, false_edge
->dest
))))
8734 tem
= EDGE_PRED (bb
, 1);
8735 if (tem
== true_edge
8736 || (single_pred_p (true_edge
->dest
)
8737 && (tem
->src
== true_edge
->dest
8738 || dominated_by_p (CDI_DOMINATORS
,
8739 tem
->src
, true_edge
->dest
))))
8741 else if (tem
== false_edge
8742 || (single_pred_p (false_edge
->dest
)
8743 && (tem
->src
== false_edge
->dest
8744 || dominated_by_p (CDI_DOMINATORS
,
8745 tem
->src
, false_edge
->dest
))))
8752 if (true_controlled_edge
)
8753 *true_controlled_edge
= e0
;
8754 if (false_controlled_edge
)
8755 *false_controlled_edge
= e1
;
8762 /* Emit return warnings. */
8766 const pass_data pass_data_warn_function_return
=
8768 GIMPLE_PASS
, /* type */
8769 "*warn_function_return", /* name */
8770 OPTGROUP_NONE
, /* optinfo_flags */
8771 TV_NONE
, /* tv_id */
8772 PROP_cfg
, /* properties_required */
8773 0, /* properties_provided */
8774 0, /* properties_destroyed */
8775 0, /* todo_flags_start */
8776 0, /* todo_flags_finish */
8779 class pass_warn_function_return
: public gimple_opt_pass
8782 pass_warn_function_return (gcc::context
*ctxt
)
8783 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
8786 /* opt_pass methods: */
8787 virtual unsigned int execute (function
*);
8789 }; // class pass_warn_function_return
8792 pass_warn_function_return::execute (function
*fun
)
8794 source_location location
;
8799 if (!targetm
.warn_func_return (fun
->decl
))
8802 /* If we have a path to EXIT, then we do return. */
8803 if (TREE_THIS_VOLATILE (fun
->decl
)
8804 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
8806 location
= UNKNOWN_LOCATION
;
8807 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8809 last
= last_stmt (e
->src
);
8810 if ((gimple_code (last
) == GIMPLE_RETURN
8811 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
8812 && (location
= gimple_location (last
)) != UNKNOWN_LOCATION
)
8815 if (location
== UNKNOWN_LOCATION
)
8816 location
= cfun
->function_end_locus
;
8817 warning_at (location
, 0, "%<noreturn%> function does return");
8820 /* If we see "return;" in some basic block, then we do reach the end
8821 without returning a value. */
8822 else if (warn_return_type
8823 && !TREE_NO_WARNING (fun
->decl
)
8824 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0
8825 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
8827 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8829 gimple
*last
= last_stmt (e
->src
);
8830 greturn
*return_stmt
= dyn_cast
<greturn
*> (last
);
8832 && gimple_return_retval (return_stmt
) == NULL
8833 && !gimple_no_warning_p (last
))
8835 location
= gimple_location (last
);
8836 if (location
== UNKNOWN_LOCATION
)
8837 location
= fun
->function_end_locus
;
8838 warning_at (location
, OPT_Wreturn_type
, "control reaches end of non-void function");
8839 TREE_NO_WARNING (fun
->decl
) = 1;
8850 make_pass_warn_function_return (gcc::context
*ctxt
)
8852 return new pass_warn_function_return (ctxt
);
8855 /* Walk a gimplified function and warn for functions whose return value is
8856 ignored and attribute((warn_unused_result)) is set. This is done before
8857 inlining, so we don't have to worry about that. */
8860 do_warn_unused_result (gimple_seq seq
)
8863 gimple_stmt_iterator i
;
8865 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
8867 gimple
*g
= gsi_stmt (i
);
8869 switch (gimple_code (g
))
8872 do_warn_unused_result (gimple_bind_body (as_a
<gbind
*>(g
)));
8875 do_warn_unused_result (gimple_try_eval (g
));
8876 do_warn_unused_result (gimple_try_cleanup (g
));
8879 do_warn_unused_result (gimple_catch_handler (
8880 as_a
<gcatch
*> (g
)));
8882 case GIMPLE_EH_FILTER
:
8883 do_warn_unused_result (gimple_eh_filter_failure (g
));
8887 if (gimple_call_lhs (g
))
8889 if (gimple_call_internal_p (g
))
8892 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8893 LHS. All calls whose value is ignored should be
8894 represented like this. Look for the attribute. */
8895 fdecl
= gimple_call_fndecl (g
);
8896 ftype
= gimple_call_fntype (g
);
8898 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
8900 location_t loc
= gimple_location (g
);
8903 warning_at (loc
, OPT_Wunused_result
,
8904 "ignoring return value of %qD, "
8905 "declared with attribute warn_unused_result",
8908 warning_at (loc
, OPT_Wunused_result
,
8909 "ignoring return value of function "
8910 "declared with attribute warn_unused_result");
8915 /* Not a container, not a call, or a call whose value is used. */
8923 const pass_data pass_data_warn_unused_result
=
8925 GIMPLE_PASS
, /* type */
8926 "*warn_unused_result", /* name */
8927 OPTGROUP_NONE
, /* optinfo_flags */
8928 TV_NONE
, /* tv_id */
8929 PROP_gimple_any
, /* properties_required */
8930 0, /* properties_provided */
8931 0, /* properties_destroyed */
8932 0, /* todo_flags_start */
8933 0, /* todo_flags_finish */
8936 class pass_warn_unused_result
: public gimple_opt_pass
8939 pass_warn_unused_result (gcc::context
*ctxt
)
8940 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
8943 /* opt_pass methods: */
8944 virtual bool gate (function
*) { return flag_warn_unused_result
; }
8945 virtual unsigned int execute (function
*)
8947 do_warn_unused_result (gimple_body (current_function_decl
));
8951 }; // class pass_warn_unused_result
8956 make_pass_warn_unused_result (gcc::context
*ctxt
)
8958 return new pass_warn_unused_result (ctxt
);
8961 /* IPA passes, compilation of earlier functions or inlining
8962 might have changed some properties, such as marked functions nothrow,
8963 pure, const or noreturn.
8964 Remove redundant edges and basic blocks, and create new ones if necessary.
8966 This pass can't be executed as stand alone pass from pass manager, because
8967 in between inlining and this fixup the verify_flow_info would fail. */
8970 execute_fixup_cfg (void)
8973 gimple_stmt_iterator gsi
;
8975 gcov_type count_scale
;
8980 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl
)->count
,
8981 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
);
8983 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
8984 cgraph_node::get (current_function_decl
)->count
;
8985 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
8986 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
,
8989 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
8990 e
->count
= apply_scale (e
->count
, count_scale
);
8992 FOR_EACH_BB_FN (bb
, cfun
)
8994 bb
->count
= apply_scale (bb
->count
, count_scale
);
8995 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
8997 gimple
*stmt
= gsi_stmt (gsi
);
8998 tree decl
= is_gimple_call (stmt
)
8999 ? gimple_call_fndecl (stmt
)
9003 int flags
= gimple_call_flags (stmt
);
9004 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
9006 if (gimple_purge_dead_abnormal_call_edges (bb
))
9007 todo
|= TODO_cleanup_cfg
;
9009 if (gimple_in_ssa_p (cfun
))
9011 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9016 if (flags
& ECF_NORETURN
9017 && fixup_noreturn_call (stmt
))
9018 todo
|= TODO_cleanup_cfg
;
9021 /* Remove stores to variables we marked write-only.
9022 Keep access when store has side effect, i.e. in case when source
9024 if (gimple_store_p (stmt
)
9025 && !gimple_has_side_effects (stmt
))
9027 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
9029 if (TREE_CODE (lhs
) == VAR_DECL
9030 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
9031 && varpool_node::get (lhs
)->writeonly
)
9033 unlink_stmt_vdef (stmt
);
9034 gsi_remove (&gsi
, true);
9035 release_defs (stmt
);
9036 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9040 /* For calls we can simply remove LHS when it is known
9041 to be write-only. */
9042 if (is_gimple_call (stmt
)
9043 && gimple_get_lhs (stmt
))
9045 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
9047 if (TREE_CODE (lhs
) == VAR_DECL
9048 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
9049 && varpool_node::get (lhs
)->writeonly
)
9051 gimple_call_set_lhs (stmt
, NULL
);
9053 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9057 if (maybe_clean_eh_stmt (stmt
)
9058 && gimple_purge_dead_eh_edges (bb
))
9059 todo
|= TODO_cleanup_cfg
;
9063 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
9064 e
->count
= apply_scale (e
->count
, count_scale
);
9066 /* If we have a basic block with no successors that does not
9067 end with a control statement or a noreturn call end it with
9068 a call to __builtin_unreachable. This situation can occur
9069 when inlining a noreturn call that does in fact return. */
9070 if (EDGE_COUNT (bb
->succs
) == 0)
9072 gimple
*stmt
= last_stmt (bb
);
9074 || (!is_ctrl_stmt (stmt
)
9075 && (!is_gimple_call (stmt
)
9076 || (gimple_call_flags (stmt
) & ECF_NORETURN
) == 0)))
9078 if (stmt
&& is_gimple_call (stmt
))
9079 gimple_call_set_ctrl_altering (stmt
, false);
9080 stmt
= gimple_build_call
9081 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
), 0);
9082 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
9083 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
9087 if (count_scale
!= REG_BR_PROB_BASE
)
9088 compute_function_frequency ();
9091 && (todo
& TODO_cleanup_cfg
))
9092 loops_state_set (LOOPS_NEED_FIXUP
);
9099 const pass_data pass_data_fixup_cfg
=
9101 GIMPLE_PASS
, /* type */
9102 "fixup_cfg", /* name */
9103 OPTGROUP_NONE
, /* optinfo_flags */
9104 TV_NONE
, /* tv_id */
9105 PROP_cfg
, /* properties_required */
9106 0, /* properties_provided */
9107 0, /* properties_destroyed */
9108 0, /* todo_flags_start */
9109 0, /* todo_flags_finish */
9112 class pass_fixup_cfg
: public gimple_opt_pass
9115 pass_fixup_cfg (gcc::context
*ctxt
)
9116 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
9119 /* opt_pass methods: */
9120 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
9121 virtual unsigned int execute (function
*) { return execute_fixup_cfg (); }
9123 }; // class pass_fixup_cfg
9128 make_pass_fixup_cfg (gcc::context
*ctxt
)
9130 return new pass_fixup_cfg (ctxt
);
9133 /* Garbage collection support for edge_def. */
9135 extern void gt_ggc_mx (tree
&);
9136 extern void gt_ggc_mx (gimple
*&);
9137 extern void gt_ggc_mx (rtx
&);
9138 extern void gt_ggc_mx (basic_block
&);
9141 gt_ggc_mx (rtx_insn
*& x
)
9144 gt_ggc_mx_rtx_def ((void *) x
);
9148 gt_ggc_mx (edge_def
*e
)
9150 tree block
= LOCATION_BLOCK (e
->goto_locus
);
9152 gt_ggc_mx (e
->dest
);
9153 if (current_ir_type () == IR_GIMPLE
)
9154 gt_ggc_mx (e
->insns
.g
);
9156 gt_ggc_mx (e
->insns
.r
);
9160 /* PCH support for edge_def. */
9162 extern void gt_pch_nx (tree
&);
9163 extern void gt_pch_nx (gimple
*&);
9164 extern void gt_pch_nx (rtx
&);
9165 extern void gt_pch_nx (basic_block
&);
9168 gt_pch_nx (rtx_insn
*& x
)
9171 gt_pch_nx_rtx_def ((void *) x
);
9175 gt_pch_nx (edge_def
*e
)
9177 tree block
= LOCATION_BLOCK (e
->goto_locus
);
9179 gt_pch_nx (e
->dest
);
9180 if (current_ir_type () == IR_GIMPLE
)
9181 gt_pch_nx (e
->insns
.g
);
9183 gt_pch_nx (e
->insns
.r
);
9188 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
9190 tree block
= LOCATION_BLOCK (e
->goto_locus
);
9191 op (&(e
->src
), cookie
);
9192 op (&(e
->dest
), cookie
);
9193 if (current_ir_type () == IR_GIMPLE
)
9194 op (&(e
->insns
.g
), cookie
);
9196 op (&(e
->insns
.r
), cookie
);
9197 op (&(block
), cookie
);
9202 namespace selftest
{
9204 /* Helper function for CFG selftests: create a dummy function decl
9205 and push it as cfun. */
9208 push_fndecl (const char *name
)
9210 tree fn_type
= build_function_type_array (integer_type_node
, 0, NULL
);
9211 /* FIXME: this uses input_location: */
9212 tree fndecl
= build_fn_decl (name
, fn_type
);
9213 tree retval
= build_decl (UNKNOWN_LOCATION
, RESULT_DECL
,
9214 NULL_TREE
, integer_type_node
);
9215 DECL_RESULT (fndecl
) = retval
;
9216 push_struct_function (fndecl
);
9217 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9218 ASSERT_TRUE (fun
!= NULL
);
9219 init_empty_tree_cfg_for_function (fun
);
9220 ASSERT_EQ (2, n_basic_blocks_for_fn (fun
));
9221 ASSERT_EQ (0, n_edges_for_fn (fun
));
9225 /* These tests directly create CFGs.
9226 Compare with the static fns within tree-cfg.c:
9228 - make_blocks: calls create_basic_block (seq, bb);
9231 /* Verify a simple cfg of the form:
9232 ENTRY -> A -> B -> C -> EXIT. */
9235 test_linear_chain ()
9237 gimple_register_cfg_hooks ();
9239 tree fndecl
= push_fndecl ("cfg_test_linear_chain");
9240 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9242 /* Create some empty blocks. */
9243 basic_block bb_a
= create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
9244 basic_block bb_b
= create_empty_bb (bb_a
);
9245 basic_block bb_c
= create_empty_bb (bb_b
);
9247 ASSERT_EQ (5, n_basic_blocks_for_fn (fun
));
9248 ASSERT_EQ (0, n_edges_for_fn (fun
));
9250 /* Create some edges: a simple linear chain of BBs. */
9251 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), bb_a
, EDGE_FALLTHRU
);
9252 make_edge (bb_a
, bb_b
, 0);
9253 make_edge (bb_b
, bb_c
, 0);
9254 make_edge (bb_c
, EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
9256 /* Verify the edges. */
9257 ASSERT_EQ (4, n_edges_for_fn (fun
));
9258 ASSERT_EQ (NULL
, ENTRY_BLOCK_PTR_FOR_FN (fun
)->preds
);
9259 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun
)->succs
->length ());
9260 ASSERT_EQ (1, bb_a
->preds
->length ());
9261 ASSERT_EQ (1, bb_a
->succs
->length ());
9262 ASSERT_EQ (1, bb_b
->preds
->length ());
9263 ASSERT_EQ (1, bb_b
->succs
->length ());
9264 ASSERT_EQ (1, bb_c
->preds
->length ());
9265 ASSERT_EQ (1, bb_c
->succs
->length ());
9266 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
->length ());
9267 ASSERT_EQ (NULL
, EXIT_BLOCK_PTR_FOR_FN (fun
)->succs
);
9269 /* Verify the dominance information
9270 Each BB in our simple chain should be dominated by the one before
9272 calculate_dominance_info (CDI_DOMINATORS
);
9273 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_b
));
9274 ASSERT_EQ (bb_b
, get_immediate_dominator (CDI_DOMINATORS
, bb_c
));
9275 vec
<basic_block
> dom_by_b
= get_dominated_by (CDI_DOMINATORS
, bb_b
);
9276 ASSERT_EQ (1, dom_by_b
.length ());
9277 ASSERT_EQ (bb_c
, dom_by_b
[0]);
9278 free_dominance_info (CDI_DOMINATORS
);
9280 /* Similarly for post-dominance: each BB in our chain is post-dominated
9281 by the one after it. */
9282 calculate_dominance_info (CDI_POST_DOMINATORS
);
9283 ASSERT_EQ (bb_b
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_a
));
9284 ASSERT_EQ (bb_c
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_b
));
9285 vec
<basic_block
> postdom_by_b
= get_dominated_by (CDI_POST_DOMINATORS
, bb_b
);
9286 ASSERT_EQ (1, postdom_by_b
.length ());
9287 ASSERT_EQ (bb_a
, postdom_by_b
[0]);
9288 free_dominance_info (CDI_POST_DOMINATORS
);
9293 /* Verify a simple CFG of the form:
9309 gimple_register_cfg_hooks ();
9311 tree fndecl
= push_fndecl ("cfg_test_diamond");
9312 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9314 /* Create some empty blocks. */
9315 basic_block bb_a
= create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
9316 basic_block bb_b
= create_empty_bb (bb_a
);
9317 basic_block bb_c
= create_empty_bb (bb_a
);
9318 basic_block bb_d
= create_empty_bb (bb_b
);
9320 ASSERT_EQ (6, n_basic_blocks_for_fn (fun
));
9321 ASSERT_EQ (0, n_edges_for_fn (fun
));
9323 /* Create the edges. */
9324 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), bb_a
, EDGE_FALLTHRU
);
9325 make_edge (bb_a
, bb_b
, EDGE_TRUE_VALUE
);
9326 make_edge (bb_a
, bb_c
, EDGE_FALSE_VALUE
);
9327 make_edge (bb_b
, bb_d
, 0);
9328 make_edge (bb_c
, bb_d
, 0);
9329 make_edge (bb_d
, EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
9331 /* Verify the edges. */
9332 ASSERT_EQ (6, n_edges_for_fn (fun
));
9333 ASSERT_EQ (1, bb_a
->preds
->length ());
9334 ASSERT_EQ (2, bb_a
->succs
->length ());
9335 ASSERT_EQ (1, bb_b
->preds
->length ());
9336 ASSERT_EQ (1, bb_b
->succs
->length ());
9337 ASSERT_EQ (1, bb_c
->preds
->length ());
9338 ASSERT_EQ (1, bb_c
->succs
->length ());
9339 ASSERT_EQ (2, bb_d
->preds
->length ());
9340 ASSERT_EQ (1, bb_d
->succs
->length ());
9342 /* Verify the dominance information. */
9343 calculate_dominance_info (CDI_DOMINATORS
);
9344 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_b
));
9345 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_c
));
9346 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_d
));
9347 vec
<basic_block
> dom_by_a
= get_dominated_by (CDI_DOMINATORS
, bb_a
);
9348 ASSERT_EQ (3, dom_by_a
.length ()); /* B, C, D, in some order. */
9349 vec
<basic_block
> dom_by_b
= get_dominated_by (CDI_DOMINATORS
, bb_b
);
9350 ASSERT_EQ (0, dom_by_b
.length ());
9351 free_dominance_info (CDI_DOMINATORS
);
9353 /* Similarly for post-dominance. */
9354 calculate_dominance_info (CDI_POST_DOMINATORS
);
9355 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_a
));
9356 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_b
));
9357 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_c
));
9358 vec
<basic_block
> postdom_by_d
= get_dominated_by (CDI_POST_DOMINATORS
, bb_d
);
9359 ASSERT_EQ (3, postdom_by_d
.length ()); /* A, B, C in some order. */
9360 vec
<basic_block
> postdom_by_b
= get_dominated_by (CDI_POST_DOMINATORS
, bb_b
);
9361 ASSERT_EQ (0, postdom_by_b
.length ());
9362 free_dominance_info (CDI_POST_DOMINATORS
);
9367 /* Verify that we can handle a CFG containing a "complete" aka
9368 fully-connected subgraph (where A B C D below all have edges
9369 pointing to each other node, also to themselves).
9387 test_fully_connected ()
9389 gimple_register_cfg_hooks ();
9391 tree fndecl
= push_fndecl ("cfg_fully_connected");
9392 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9396 /* Create some empty blocks. */
9397 auto_vec
<basic_block
> subgraph_nodes
;
9398 for (int i
= 0; i
< n
; i
++)
9399 subgraph_nodes
.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
)));
9401 ASSERT_EQ (n
+ 2, n_basic_blocks_for_fn (fun
));
9402 ASSERT_EQ (0, n_edges_for_fn (fun
));
9404 /* Create the edges. */
9405 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), subgraph_nodes
[0], EDGE_FALLTHRU
);
9406 make_edge (subgraph_nodes
[0], EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
9407 for (int i
= 0; i
< n
; i
++)
9408 for (int j
= 0; j
< n
; j
++)
9409 make_edge (subgraph_nodes
[i
], subgraph_nodes
[j
], 0);
9411 /* Verify the edges. */
9412 ASSERT_EQ (2 + (n
* n
), n_edges_for_fn (fun
));
9413 /* The first one is linked to ENTRY/EXIT as well as itself and
9415 ASSERT_EQ (n
+ 1, subgraph_nodes
[0]->preds
->length ());
9416 ASSERT_EQ (n
+ 1, subgraph_nodes
[0]->succs
->length ());
9417 /* The other ones in the subgraph are linked to everything in
9418 the subgraph (including themselves). */
9419 for (int i
= 1; i
< n
; i
++)
9421 ASSERT_EQ (n
, subgraph_nodes
[i
]->preds
->length ());
9422 ASSERT_EQ (n
, subgraph_nodes
[i
]->succs
->length ());
9425 /* Verify the dominance information. */
9426 calculate_dominance_info (CDI_DOMINATORS
);
9427 /* The initial block in the subgraph should be dominated by ENTRY. */
9428 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun
),
9429 get_immediate_dominator (CDI_DOMINATORS
,
9430 subgraph_nodes
[0]));
9431 /* Every other block in the subgraph should be dominated by the
9433 for (int i
= 1; i
< n
; i
++)
9434 ASSERT_EQ (subgraph_nodes
[0],
9435 get_immediate_dominator (CDI_DOMINATORS
,
9436 subgraph_nodes
[i
]));
9437 free_dominance_info (CDI_DOMINATORS
);
9439 /* Similarly for post-dominance. */
9440 calculate_dominance_info (CDI_POST_DOMINATORS
);
9441 /* The initial block in the subgraph should be postdominated by EXIT. */
9442 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun
),
9443 get_immediate_dominator (CDI_POST_DOMINATORS
,
9444 subgraph_nodes
[0]));
9445 /* Every other block in the subgraph should be postdominated by the
9446 initial block, since that leads to EXIT. */
9447 for (int i
= 1; i
< n
; i
++)
9448 ASSERT_EQ (subgraph_nodes
[0],
9449 get_immediate_dominator (CDI_POST_DOMINATORS
,
9450 subgraph_nodes
[i
]));
9451 free_dominance_info (CDI_POST_DOMINATORS
);
9456 /* Run all of the selftests within this file. */
9461 test_linear_chain ();
9463 test_fully_connected ();
9466 } // namespace selftest
9468 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9471 - switch statement (a block with many out-edges)
9472 - something that jumps to itself
9475 #endif /* CHECKING_P */