1 /* Control flow functions for trees.
2 Copyright (C) 2001-2017 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
66 /* This file contains functions for building the Control Flow Graph (CFG)
67 for a function tree. */
69 /* Local declarations. */
/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;
74 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
75 which use a particular edge. The CASE_LABEL_EXPRs are chained together
76 via their CASE_CHAIN field, which we clear after we're done with the
77 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
79 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
80 update the case vector in response to edge redirections.
82 Right now this table is set up and torn down at key points in the
83 compilation process. It would be nice if we could make the table
84 more persistent. The key is getting notification of changes to
85 the CFG (particularly edge removal, creation and redirection). */
87 static hash_map
<edge
, tree
> *edge_to_cases
;
89 /* If we record edge_to_cases, this bitmap will hold indexes
90 of basic blocks that end in a GIMPLE_SWITCH which we touched
91 due to edge manipulations. */
93 static bitmap touched_switch_bbs
;
/* CFG statistics gathered during CFG construction.  */
struct cfg_stats_d
{
  /* Number of labels merged into their predecessor block.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
103 /* Data to pass to replace_block_vars_by_duplicates_1. */
104 struct replace_decls_d
106 hash_map
<tree
, tree
> *vars_map
;
110 /* Hash table to store last discriminator assigned for each locus. */
111 struct locus_discrim_map
117 /* Hashtable helpers. */
119 struct locus_discrim_hasher
: free_ptr_hash
<locus_discrim_map
>
121 static inline hashval_t
hash (const locus_discrim_map
*);
122 static inline bool equal (const locus_discrim_map
*,
123 const locus_discrim_map
*);
126 /* Trivial hash function for a location_t. ITEM is a pointer to
127 a hash table entry that maps a location_t to a discriminator. */
130 locus_discrim_hasher::hash (const locus_discrim_map
*item
)
132 return LOCATION_LINE (item
->locus
);
135 /* Equality function for the locus-to-discriminator map. A and B
136 point to the two hash table entries to compare. */
139 locus_discrim_hasher::equal (const locus_discrim_map
*a
,
140 const locus_discrim_map
*b
)
142 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
145 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
147 /* Basic blocks and flowgraphs. */
148 static void make_blocks (gimple_seq
);
151 static void make_edges (void);
152 static void assign_discriminators (void);
153 static void make_cond_expr_edges (basic_block
);
154 static void make_gimple_switch_edges (gswitch
*, basic_block
);
155 static bool make_goto_expr_edges (basic_block
);
156 static void make_gimple_asm_edges (basic_block
);
157 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
158 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
160 /* Various helpers. */
161 static inline bool stmt_starts_bb_p (gimple
*, gimple
*);
162 static int gimple_verify_flow_info (void);
163 static void gimple_make_forwarder_block (edge
);
164 static gimple
*first_non_label_stmt (basic_block
);
165 static bool verify_gimple_transaction (gtransaction
*);
166 static bool call_can_make_abnormal_goto (gimple
*);
168 /* Flowgraph optimization and cleanup. */
169 static void gimple_merge_blocks (basic_block
, basic_block
);
170 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
171 static void remove_bb (basic_block
);
172 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
173 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
174 static edge
find_taken_edge_switch_expr (gswitch
*, basic_block
, tree
);
175 static tree
find_case_label_for_value (gswitch
*, tree
);
176 static void lower_phi_internal_fn ();
179 init_empty_tree_cfg_for_function (struct function
*fn
)
181 /* Initialize the basic block array. */
183 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
184 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
185 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
186 vec_alloc (basic_block_info_for_fn (fn
), initial_cfg_capacity
);
187 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
188 initial_cfg_capacity
);
190 /* Build a mapping of labels to their associated blocks. */
191 vec_alloc (label_to_block_map_for_fn (fn
), initial_cfg_capacity
);
192 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
193 initial_cfg_capacity
);
195 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
196 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
198 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
199 = EXIT_BLOCK_PTR_FOR_FN (fn
);
200 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
201 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
205 init_empty_tree_cfg (void)
207 init_empty_tree_cfg_for_function (cfun
);
210 /*---------------------------------------------------------------------------
212 ---------------------------------------------------------------------------*/
214 /* Entry point to the CFG builder for trees. SEQ is the sequence of
215 statements to be added to the flowgraph. */
218 build_gimple_cfg (gimple_seq seq
)
220 /* Register specific gimple functions. */
221 gimple_register_cfg_hooks ();
223 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
225 init_empty_tree_cfg ();
229 /* Make sure there is always at least one block, even if it's empty. */
230 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
231 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
233 /* Adjust the size of the array. */
234 if (basic_block_info_for_fn (cfun
)->length ()
235 < (size_t) n_basic_blocks_for_fn (cfun
))
236 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
237 n_basic_blocks_for_fn (cfun
));
239 /* To speed up statement iterator walks, we first purge dead labels. */
240 cleanup_dead_labels ();
242 /* Group case nodes to reduce the number of edges.
243 We do this after cleaning up dead labels because otherwise we miss
244 a lot of obvious case merging opportunities. */
245 group_case_labels ();
247 /* Create the edges of the flowgraph. */
248 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
250 assign_discriminators ();
251 lower_phi_internal_fn ();
252 cleanup_dead_labels ();
253 delete discriminator_per_locus
;
254 discriminator_per_locus
= NULL
;
257 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
258 them and propagate the information to LOOP. We assume that the annotations
259 come immediately before the condition in BB, if any. */
262 replace_loop_annotate_in_block (basic_block bb
, struct loop
*loop
)
264 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
265 gimple
*stmt
= gsi_stmt (gsi
);
267 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
270 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
272 stmt
= gsi_stmt (gsi
);
273 if (gimple_code (stmt
) != GIMPLE_CALL
)
275 if (!gimple_call_internal_p (stmt
)
276 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
279 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
281 case annot_expr_ivdep_kind
:
282 loop
->safelen
= INT_MAX
;
284 case annot_expr_unroll_kind
:
286 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt
, 2));
287 cfun
->has_unroll
= true;
289 case annot_expr_no_vector_kind
:
290 loop
->dont_vectorize
= true;
292 case annot_expr_vector_kind
:
293 loop
->force_vectorize
= true;
294 cfun
->has_force_vectorize_loops
= true;
296 case annot_expr_parallel_kind
:
297 loop
->can_be_parallel
= true;
298 loop
->safelen
= INT_MAX
;
304 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
305 gimple_call_arg (stmt
, 0));
306 gsi_replace (&gsi
, stmt
, true);
310 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
311 them and propagate the information to the loop. We assume that the
312 annotations come immediately before the condition of the loop. */
315 replace_loop_annotate (void)
319 gimple_stmt_iterator gsi
;
322 FOR_EACH_LOOP (loop
, 0)
324 /* First look into the header. */
325 replace_loop_annotate_in_block (loop
->header
, loop
);
327 /* Then look into the latch, if any. */
329 replace_loop_annotate_in_block (loop
->latch
, loop
);
332 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
333 FOR_EACH_BB_FN (bb
, cfun
)
335 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
337 stmt
= gsi_stmt (gsi
);
338 if (gimple_code (stmt
) != GIMPLE_CALL
)
340 if (!gimple_call_internal_p (stmt
)
341 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
344 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
346 case annot_expr_ivdep_kind
:
347 case annot_expr_unroll_kind
:
348 case annot_expr_no_vector_kind
:
349 case annot_expr_vector_kind
:
355 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
356 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
357 gimple_call_arg (stmt
, 0));
358 gsi_replace (&gsi
, stmt
, true);
363 /* Lower internal PHI function from GIMPLE FE. */
366 lower_phi_internal_fn ()
368 basic_block bb
, pred
= NULL
;
369 gimple_stmt_iterator gsi
;
374 /* After edge creation, handle __PHI function from GIMPLE FE. */
375 FOR_EACH_BB_FN (bb
, cfun
)
377 for (gsi
= gsi_after_labels (bb
); !gsi_end_p (gsi
);)
379 stmt
= gsi_stmt (gsi
);
380 if (! gimple_call_internal_p (stmt
, IFN_PHI
))
383 lhs
= gimple_call_lhs (stmt
);
384 phi_node
= create_phi_node (lhs
, bb
);
386 /* Add arguments to the PHI node. */
387 for (unsigned i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
389 tree arg
= gimple_call_arg (stmt
, i
);
390 if (TREE_CODE (arg
) == LABEL_DECL
)
391 pred
= label_to_block (arg
);
394 edge e
= find_edge (pred
, bb
);
395 add_phi_arg (phi_node
, arg
, e
, UNKNOWN_LOCATION
);
399 gsi_remove (&gsi
, true);
405 execute_build_cfg (void)
407 gimple_seq body
= gimple_body (current_function_decl
);
409 build_gimple_cfg (body
);
410 gimple_set_body (current_function_decl
, NULL
);
411 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
413 fprintf (dump_file
, "Scope blocks:\n");
414 dump_scope_blocks (dump_file
, dump_flags
);
417 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
418 replace_loop_annotate ();
424 const pass_data pass_data_build_cfg
=
426 GIMPLE_PASS
, /* type */
428 OPTGROUP_NONE
, /* optinfo_flags */
429 TV_TREE_CFG
, /* tv_id */
430 PROP_gimple_leh
, /* properties_required */
431 ( PROP_cfg
| PROP_loops
), /* properties_provided */
432 0, /* properties_destroyed */
433 0, /* todo_flags_start */
434 0, /* todo_flags_finish */
437 class pass_build_cfg
: public gimple_opt_pass
440 pass_build_cfg (gcc::context
*ctxt
)
441 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
444 /* opt_pass methods: */
445 virtual unsigned int execute (function
*) { return execute_build_cfg (); }
447 }; // class pass_build_cfg
452 make_pass_build_cfg (gcc::context
*ctxt
)
454 return new pass_build_cfg (ctxt
);
458 /* Return true if T is a computed goto. */
461 computed_goto_p (gimple
*t
)
463 return (gimple_code (t
) == GIMPLE_GOTO
464 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
467 /* Returns true if the sequence of statements STMTS only contains
468 a call to __builtin_unreachable (). */
471 gimple_seq_unreachable_p (gimple_seq stmts
)
474 /* Return false if -fsanitize=unreachable, we don't want to
475 optimize away those calls, but rather turn them into
476 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
478 || sanitize_flags_p (SANITIZE_UNREACHABLE
))
481 gimple_stmt_iterator gsi
= gsi_last (stmts
);
483 if (!gimple_call_builtin_p (gsi_stmt (gsi
), BUILT_IN_UNREACHABLE
))
486 for (gsi_prev (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
488 gimple
*stmt
= gsi_stmt (gsi
);
489 if (gimple_code (stmt
) != GIMPLE_LABEL
490 && !is_gimple_debug (stmt
)
491 && !gimple_clobber_p (stmt
))
497 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
498 the other edge points to a bb with just __builtin_unreachable ().
499 I.e. return true for C->M edge in:
507 __builtin_unreachable ();
511 assert_unreachable_fallthru_edge_p (edge e
)
513 basic_block pred_bb
= e
->src
;
514 gimple
*last
= last_stmt (pred_bb
);
515 if (last
&& gimple_code (last
) == GIMPLE_COND
)
517 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
518 if (other_bb
== e
->dest
)
519 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
520 if (EDGE_COUNT (other_bb
->succs
) == 0)
521 return gimple_seq_unreachable_p (bb_seq (other_bb
));
527 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
528 could alter control flow except via eh. We initialize the flag at
529 CFG build time and only ever clear it later. */
532 gimple_call_initialize_ctrl_altering (gimple
*stmt
)
534 int flags
= gimple_call_flags (stmt
);
536 /* A call alters control flow if it can make an abnormal goto. */
537 if (call_can_make_abnormal_goto (stmt
)
538 /* A call also alters control flow if it does not return. */
539 || flags
& ECF_NORETURN
540 /* TM ending statements have backedges out of the transaction.
541 Return true so we split the basic block containing them.
542 Note that the TM_BUILTIN test is merely an optimization. */
543 || ((flags
& ECF_TM_BUILTIN
)
544 && is_tm_ending_fndecl (gimple_call_fndecl (stmt
)))
545 /* BUILT_IN_RETURN call is same as return statement. */
546 || gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
)
547 /* IFN_UNIQUE should be the last insn, to make checking for it
548 as cheap as possible. */
549 || (gimple_call_internal_p (stmt
)
550 && gimple_call_internal_unique_p (stmt
)))
551 gimple_call_set_ctrl_altering (stmt
, true);
553 gimple_call_set_ctrl_altering (stmt
, false);
557 /* Insert SEQ after BB and build a flowgraph. */
560 make_blocks_1 (gimple_seq seq
, basic_block bb
)
562 gimple_stmt_iterator i
= gsi_start (seq
);
564 bool start_new_block
= true;
565 bool first_stmt_of_seq
= true;
567 while (!gsi_end_p (i
))
574 if (stmt
&& is_gimple_call (stmt
))
575 gimple_call_initialize_ctrl_altering (stmt
);
577 /* If the statement starts a new basic block or if we have determined
578 in a previous pass that we need to create a new block for STMT, do
580 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
582 if (!first_stmt_of_seq
)
583 gsi_split_seq_before (&i
, &seq
);
584 bb
= create_basic_block (seq
, bb
);
585 start_new_block
= false;
588 /* Now add STMT to BB and create the subgraphs for special statement
590 gimple_set_bb (stmt
, bb
);
592 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
594 if (stmt_ends_bb_p (stmt
))
596 /* If the stmt can make abnormal goto use a new temporary
597 for the assignment to the LHS. This makes sure the old value
598 of the LHS is available on the abnormal edge. Otherwise
599 we will end up with overlapping life-ranges for abnormal
601 if (gimple_has_lhs (stmt
)
602 && stmt_can_make_abnormal_goto (stmt
)
603 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
605 tree lhs
= gimple_get_lhs (stmt
);
606 tree tmp
= create_tmp_var (TREE_TYPE (lhs
));
607 gimple
*s
= gimple_build_assign (lhs
, tmp
);
608 gimple_set_location (s
, gimple_location (stmt
));
609 gimple_set_block (s
, gimple_block (stmt
));
610 gimple_set_lhs (stmt
, tmp
);
611 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
612 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
613 DECL_GIMPLE_REG_P (tmp
) = 1;
614 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
616 start_new_block
= true;
620 first_stmt_of_seq
= false;
625 /* Build a flowgraph for the sequence of stmts SEQ. */
628 make_blocks (gimple_seq seq
)
630 make_blocks_1 (seq
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
633 /* Create and return a new empty basic block after bb AFTER. */
636 create_bb (void *h
, void *e
, basic_block after
)
642 /* Create and initialize a new basic block. Since alloc_block uses
643 GC allocation that clears memory to allocate a basic block, we do
644 not have to clear the newly allocated basic block here. */
647 bb
->index
= last_basic_block_for_fn (cfun
);
649 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
651 /* Add the new block to the linked list of blocks. */
652 link_block (bb
, after
);
654 /* Grow the basic block array if needed. */
655 if ((size_t) last_basic_block_for_fn (cfun
)
656 == basic_block_info_for_fn (cfun
)->length ())
659 (last_basic_block_for_fn (cfun
)
660 + (last_basic_block_for_fn (cfun
) + 3) / 4);
661 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
664 /* Add the newly created block to the array. */
665 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
667 n_basic_blocks_for_fn (cfun
)++;
668 last_basic_block_for_fn (cfun
)++;
674 /*---------------------------------------------------------------------------
676 ---------------------------------------------------------------------------*/
678 /* If basic block BB has an abnormal edge to a basic block
679 containing IFN_ABNORMAL_DISPATCHER internal call, return
680 that the dispatcher's basic block, otherwise return NULL. */
683 get_abnormal_succ_dispatcher (basic_block bb
)
688 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
689 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
691 gimple_stmt_iterator gsi
692 = gsi_start_nondebug_after_labels_bb (e
->dest
);
693 gimple
*g
= gsi_stmt (gsi
);
694 if (g
&& gimple_call_internal_p (g
, IFN_ABNORMAL_DISPATCHER
))
700 /* Helper function for make_edges. Create a basic block with
701 with ABNORMAL_DISPATCHER internal call in it if needed, and
702 create abnormal edges from BBS to it and from it to FOR_BB
703 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
706 handle_abnormal_edges (basic_block
*dispatcher_bbs
,
707 basic_block for_bb
, int *bb_to_omp_idx
,
708 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
710 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
711 unsigned int idx
= 0;
717 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
718 if (bb_to_omp_idx
[for_bb
->index
] != 0)
722 /* If the dispatcher has been created already, then there are basic
723 blocks with abnormal edges to it, so just make a new edge to
725 if (*dispatcher
== NULL
)
727 /* Check if there are any basic blocks that need to have
728 abnormal edges to this dispatcher. If there are none, return
730 if (bb_to_omp_idx
== NULL
)
732 if (bbs
->is_empty ())
737 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
738 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
744 /* Create the dispatcher bb. */
745 *dispatcher
= create_basic_block (NULL
, for_bb
);
748 /* Factor computed gotos into a common computed goto site. Also
749 record the location of that site so that we can un-factor the
750 gotos after we have converted back to normal form. */
751 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
753 /* Create the destination of the factored goto. Each original
754 computed goto will put its desired destination into this
755 variable and jump to the label we create immediately below. */
756 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
758 /* Build a label for the new block which will contain the
759 factored computed goto. */
760 tree factored_label_decl
761 = create_artificial_label (UNKNOWN_LOCATION
);
762 gimple
*factored_computed_goto_label
763 = gimple_build_label (factored_label_decl
);
764 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
766 /* Build our new computed goto. */
767 gimple
*factored_computed_goto
= gimple_build_goto (var
);
768 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
770 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
773 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
776 gsi
= gsi_last_bb (bb
);
777 gimple
*last
= gsi_stmt (gsi
);
779 gcc_assert (computed_goto_p (last
));
781 /* Copy the original computed goto's destination into VAR. */
783 = gimple_build_assign (var
, gimple_goto_dest (last
));
784 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
786 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
787 e
->goto_locus
= gimple_location (last
);
788 gsi_remove (&gsi
, true);
793 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
794 gimple
*g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
796 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
797 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
799 /* Create predecessor edges of the dispatcher. */
800 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
803 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
805 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
810 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
813 /* Creates outgoing edges for BB. Returns 1 when it ends with an
814 computed goto, returns 2 when it ends with a statement that
815 might return to this function via an nonlocal goto, otherwise
816 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
819 make_edges_bb (basic_block bb
, struct omp_region
**pcur_region
, int *pomp_index
)
821 gimple
*last
= last_stmt (bb
);
822 bool fallthru
= false;
828 switch (gimple_code (last
))
831 if (make_goto_expr_edges (bb
))
837 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
838 e
->goto_locus
= gimple_location (last
);
843 make_cond_expr_edges (bb
);
847 make_gimple_switch_edges (as_a
<gswitch
*> (last
), bb
);
851 make_eh_edges (last
);
854 case GIMPLE_EH_DISPATCH
:
855 fallthru
= make_eh_dispatch_edges (as_a
<geh_dispatch
*> (last
));
859 /* If this function receives a nonlocal goto, then we need to
860 make edges from this call site to all the nonlocal goto
862 if (stmt_can_make_abnormal_goto (last
))
865 /* If this statement has reachable exception handlers, then
866 create abnormal edges to them. */
867 make_eh_edges (last
);
869 /* BUILTIN_RETURN is really a return statement. */
870 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
872 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
875 /* Some calls are known not to return. */
877 fallthru
= !gimple_call_noreturn_p (last
);
881 /* A GIMPLE_ASSIGN may throw internally and thus be considered
883 if (is_ctrl_altering_stmt (last
))
884 make_eh_edges (last
);
889 make_gimple_asm_edges (bb
);
894 fallthru
= omp_make_gimple_edges (bb
, pcur_region
, pomp_index
);
897 case GIMPLE_TRANSACTION
:
899 gtransaction
*txn
= as_a
<gtransaction
*> (last
);
900 tree label1
= gimple_transaction_label_norm (txn
);
901 tree label2
= gimple_transaction_label_uninst (txn
);
904 make_edge (bb
, label_to_block (label1
), EDGE_FALLTHRU
);
906 make_edge (bb
, label_to_block (label2
),
907 EDGE_TM_UNINSTRUMENTED
| (label1
? 0 : EDGE_FALLTHRU
));
909 tree label3
= gimple_transaction_label_over (txn
);
910 if (gimple_transaction_subcode (txn
)
911 & (GTMA_HAVE_ABORT
| GTMA_IS_OUTER
))
912 make_edge (bb
, label_to_block (label3
), EDGE_TM_ABORT
);
919 gcc_assert (!stmt_ends_bb_p (last
));
925 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
930 /* Join all the blocks in the flowgraph. */
936 struct omp_region
*cur_region
= NULL
;
937 auto_vec
<basic_block
> ab_edge_goto
;
938 auto_vec
<basic_block
> ab_edge_call
;
939 int *bb_to_omp_idx
= NULL
;
940 int cur_omp_region_idx
= 0;
942 /* Create an edge from entry to the first block with executable
944 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
945 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
948 /* Traverse the basic block array placing edges. */
949 FOR_EACH_BB_FN (bb
, cfun
)
954 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
956 mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
958 ab_edge_goto
.safe_push (bb
);
960 ab_edge_call
.safe_push (bb
);
962 if (cur_region
&& bb_to_omp_idx
== NULL
)
963 bb_to_omp_idx
= XCNEWVEC (int, n_basic_blocks_for_fn (cfun
));
966 /* Computed gotos are hell to deal with, especially if there are
967 lots of them with a large number of destinations. So we factor
968 them to a common computed goto location before we build the
969 edge list. After we convert back to normal form, we will un-factor
970 the computed gotos since factoring introduces an unwanted jump.
971 For non-local gotos and abnormal edges from calls to calls that return
972 twice or forced labels, factor the abnormal edges too, by having all
973 abnormal edges from the calls go to a common artificial basic block
974 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
975 basic block to all forced labels and calls returning twice.
976 We do this per-OpenMP structured block, because those regions
977 are guaranteed to be single entry single exit by the standard,
978 so it is not allowed to enter or exit such regions abnormally this way,
979 thus all computed gotos, non-local gotos and setjmp/longjmp calls
980 must not transfer control across SESE region boundaries. */
981 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
983 gimple_stmt_iterator gsi
;
984 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
985 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
986 int count
= n_basic_blocks_for_fn (cfun
);
989 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
991 FOR_EACH_BB_FN (bb
, cfun
)
993 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
995 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1001 target
= gimple_label_label (label_stmt
);
1003 /* Make an edge to every label block that has been marked as a
1004 potential target for a computed goto or a non-local goto. */
1005 if (FORCED_LABEL (target
))
1006 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1007 &ab_edge_goto
, true);
1008 if (DECL_NONLOCAL (target
))
1010 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1011 &ab_edge_call
, false);
1016 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
1017 gsi_next_nondebug (&gsi
);
1018 if (!gsi_end_p (gsi
))
1020 /* Make an edge to every setjmp-like call. */
1021 gimple
*call_stmt
= gsi_stmt (gsi
);
1022 if (is_gimple_call (call_stmt
)
1023 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
1024 || gimple_call_builtin_p (call_stmt
,
1025 BUILT_IN_SETJMP_RECEIVER
)))
1026 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1027 &ab_edge_call
, false);
1032 XDELETE (dispatcher_bbs
);
1035 XDELETE (bb_to_omp_idx
);
1037 omp_free_regions ();
1040 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1041 needed. Returns true if new bbs were created.
1042 Note: This is transitional code, and should not be used for new code. We
1043 should be able to get rid of this by rewriting all target va-arg
1044 gimplification hooks to use an interface gimple_build_cond_value as described
1045 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1048 gimple_find_sub_bbs (gimple_seq seq
, gimple_stmt_iterator
*gsi
)
1050 gimple
*stmt
= gsi_stmt (*gsi
);
1051 basic_block bb
= gimple_bb (stmt
);
1052 basic_block lastbb
, afterbb
;
1053 int old_num_bbs
= n_basic_blocks_for_fn (cfun
);
1055 lastbb
= make_blocks_1 (seq
, bb
);
1056 if (old_num_bbs
== n_basic_blocks_for_fn (cfun
))
1058 e
= split_block (bb
, stmt
);
1059 /* Move e->dest to come after the new basic blocks. */
1061 unlink_block (afterbb
);
1062 link_block (afterbb
, lastbb
);
1063 redirect_edge_succ (e
, bb
->next_bb
);
1065 while (bb
!= afterbb
)
1067 struct omp_region
*cur_region
= NULL
;
1068 profile_count cnt
= profile_count::zero ();
1071 int cur_omp_region_idx
= 0;
1072 int mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
1073 gcc_assert (!mer
&& !cur_region
);
1074 add_bb_to_loop (bb
, afterbb
->loop_father
);
1078 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1080 if (e
->count ().initialized_p ())
1085 tree_guess_outgoing_edge_probabilities (bb
);
1086 if (all
|| profile_status_for_fn (cfun
) == PROFILE_READ
)
1094 /* Find the next available discriminator value for LOCUS. The
1095 discriminator distinguishes among several basic blocks that
1096 share a common locus, allowing for more accurate sample-based
1100 next_discriminator_for_locus (location_t locus
)
1102 struct locus_discrim_map item
;
1103 struct locus_discrim_map
**slot
;
1106 item
.discriminator
= 0;
1107 slot
= discriminator_per_locus
->find_slot_with_hash (
1108 &item
, LOCATION_LINE (locus
), INSERT
);
1110 if (*slot
== HTAB_EMPTY_ENTRY
)
1112 *slot
= XNEW (struct locus_discrim_map
);
1114 (*slot
)->locus
= locus
;
1115 (*slot
)->discriminator
= 0;
1117 (*slot
)->discriminator
++;
1118 return (*slot
)->discriminator
;
1121 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1124 same_line_p (location_t locus1
, location_t locus2
)
1126 expanded_location from
, to
;
1128 if (locus1
== locus2
)
1131 from
= expand_location (locus1
);
1132 to
= expand_location (locus2
);
1134 if (from
.line
!= to
.line
)
1136 if (from
.file
== to
.file
)
1138 return (from
.file
!= NULL
1140 && filename_cmp (from
.file
, to
.file
) == 0);
1143 /* Assign discriminators to each basic block. */
1146 assign_discriminators (void)
1150 FOR_EACH_BB_FN (bb
, cfun
)
1154 gimple
*last
= last_stmt (bb
);
1155 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
1157 if (locus
== UNKNOWN_LOCATION
)
1160 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1162 gimple
*first
= first_non_label_stmt (e
->dest
);
1163 gimple
*last
= last_stmt (e
->dest
);
1164 if ((first
&& same_line_p (locus
, gimple_location (first
)))
1165 || (last
&& same_line_p (locus
, gimple_location (last
))))
1167 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
1168 bb
->discriminator
= next_discriminator_for_locus (locus
);
1170 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
1176 /* Create the edges for a GIMPLE_COND starting at block BB. */
1179 make_cond_expr_edges (basic_block bb
)
1181 gcond
*entry
= as_a
<gcond
*> (last_stmt (bb
));
1182 gimple
*then_stmt
, *else_stmt
;
1183 basic_block then_bb
, else_bb
;
1184 tree then_label
, else_label
;
1188 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
1190 /* Entry basic blocks for each component. */
1191 then_label
= gimple_cond_true_label (entry
);
1192 else_label
= gimple_cond_false_label (entry
);
1193 then_bb
= label_to_block (then_label
);
1194 else_bb
= label_to_block (else_label
);
1195 then_stmt
= first_stmt (then_bb
);
1196 else_stmt
= first_stmt (else_bb
);
1198 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1199 e
->goto_locus
= gimple_location (then_stmt
);
1200 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1202 e
->goto_locus
= gimple_location (else_stmt
);
1204 /* We do not need the labels anymore. */
1205 gimple_cond_set_true_label (entry
, NULL_TREE
);
1206 gimple_cond_set_false_label (entry
, NULL_TREE
);
1210 /* Called for each element in the hash table (P) as we delete the
1211 edge to cases hash table.
1213 Clear all the CASE_CHAINs to prevent problems with copying of
1214 SWITCH_EXPRs and structure sharing rules, then free the hash table
1218 edge_to_cases_cleanup (edge
const &, tree
const &value
, void *)
1222 for (t
= value
; t
; t
= next
)
1224 next
= CASE_CHAIN (t
);
1225 CASE_CHAIN (t
) = NULL
;
1231 /* Start recording information mapping edges to case labels. */
1234 start_recording_case_labels (void)
1236 gcc_assert (edge_to_cases
== NULL
);
1237 edge_to_cases
= new hash_map
<edge
, tree
>;
1238 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
1241 /* Return nonzero if we are recording information for case labels. */
1244 recording_case_labels_p (void)
1246 return (edge_to_cases
!= NULL
);
1249 /* Stop recording information mapping edges to case labels and
1250 remove any information we have recorded. */
1252 end_recording_case_labels (void)
1256 edge_to_cases
->traverse
<void *, edge_to_cases_cleanup
> (NULL
);
1257 delete edge_to_cases
;
1258 edge_to_cases
= NULL
;
1259 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1261 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
1264 gimple
*stmt
= last_stmt (bb
);
1265 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1266 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1269 BITMAP_FREE (touched_switch_bbs
);
1272 /* If we are inside a {start,end}_recording_cases block, then return
1273 a chain of CASE_LABEL_EXPRs from T which reference E.
1275 Otherwise return NULL. */
1278 get_cases_for_edge (edge e
, gswitch
*t
)
1283 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1284 chains available. Return NULL so the caller can detect this case. */
1285 if (!recording_case_labels_p ())
1288 slot
= edge_to_cases
->get (e
);
1292 /* If we did not find E in the hash table, then this must be the first
1293 time we have been queried for information about E & T. Add all the
1294 elements from T to the hash table then perform the query again. */
1296 n
= gimple_switch_num_labels (t
);
1297 for (i
= 0; i
< n
; i
++)
1299 tree elt
= gimple_switch_label (t
, i
);
1300 tree lab
= CASE_LABEL (elt
);
1301 basic_block label_bb
= label_to_block (lab
);
1302 edge this_edge
= find_edge (e
->src
, label_bb
);
1304 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1306 tree
&s
= edge_to_cases
->get_or_insert (this_edge
);
1307 CASE_CHAIN (elt
) = s
;
1311 return *edge_to_cases
->get (e
);
1314 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1317 make_gimple_switch_edges (gswitch
*entry
, basic_block bb
)
1321 n
= gimple_switch_num_labels (entry
);
1323 for (i
= 0; i
< n
; ++i
)
1325 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1326 basic_block label_bb
= label_to_block (lab
);
1327 make_edge (bb
, label_bb
, 0);
1332 /* Return the basic block holding label DEST. */
1335 label_to_block_fn (struct function
*ifun
, tree dest
)
1337 int uid
= LABEL_DECL_UID (dest
);
1339 /* We would die hard when faced by an undefined label. Emit a label to
1340 the very first basic block. This will hopefully make even the dataflow
1341 and undefined variable warnings quite right. */
1342 if (seen_error () && uid
< 0)
1344 gimple_stmt_iterator gsi
=
1345 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1348 stmt
= gimple_build_label (dest
);
1349 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1350 uid
= LABEL_DECL_UID (dest
);
1352 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1354 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1357 /* Create edges for a goto statement at block BB. Returns true
1358 if abnormal edges should be created. */
1361 make_goto_expr_edges (basic_block bb
)
1363 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1364 gimple
*goto_t
= gsi_stmt (last
);
1366 /* A simple GOTO creates normal edges. */
1367 if (simple_goto_p (goto_t
))
1369 tree dest
= gimple_goto_dest (goto_t
);
1370 basic_block label_bb
= label_to_block (dest
);
1371 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1372 e
->goto_locus
= gimple_location (goto_t
);
1373 gsi_remove (&last
, true);
1377 /* A computed GOTO creates abnormal edges. */
1381 /* Create edges for an asm statement with labels at block BB. */
1384 make_gimple_asm_edges (basic_block bb
)
1386 gasm
*stmt
= as_a
<gasm
*> (last_stmt (bb
));
1387 int i
, n
= gimple_asm_nlabels (stmt
);
1389 for (i
= 0; i
< n
; ++i
)
1391 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1392 basic_block label_bb
= label_to_block (label
);
1393 make_edge (bb
, label_bb
, 0);
1397 /*---------------------------------------------------------------------------
1399 ---------------------------------------------------------------------------*/
1401 /* Cleanup useless labels in basic blocks. This is something we wish
1402 to do early because it allows us to group case labels before creating
1403 the edges for the CFG, and it speeds up block statement iterators in
1404 all passes later on.
1405 We rerun this pass after CFG is created, to get rid of the labels that
1406 are no longer referenced. After then we do not run it any more, since
1407 (almost) no new labels should be created. */
1409 /* A map from basic block index to the leading label of that block. */
1410 static struct label_record
1415 /* True if the label is referenced from somewhere. */
1419 /* Given LABEL return the first label in the same basic block. */
1422 main_block_label (tree label
)
1424 basic_block bb
= label_to_block (label
);
1425 tree main_label
= label_for_bb
[bb
->index
].label
;
1427 /* label_to_block possibly inserted undefined label into the chain. */
1430 label_for_bb
[bb
->index
].label
= label
;
1434 label_for_bb
[bb
->index
].used
= true;
1438 /* Clean up redundant labels within the exception tree. */
1441 cleanup_dead_labels_eh (void)
1448 if (cfun
->eh
== NULL
)
1451 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1452 if (lp
&& lp
->post_landing_pad
)
1454 lab
= main_block_label (lp
->post_landing_pad
);
1455 if (lab
!= lp
->post_landing_pad
)
1457 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1458 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1462 FOR_ALL_EH_REGION (r
)
1466 case ERT_MUST_NOT_THROW
:
1472 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1476 c
->label
= main_block_label (lab
);
1481 case ERT_ALLOWED_EXCEPTIONS
:
1482 lab
= r
->u
.allowed
.label
;
1484 r
->u
.allowed
.label
= main_block_label (lab
);
1490 /* Cleanup redundant labels. This is a three-step process:
1491 1) Find the leading label for each block.
1492 2) Redirect all references to labels to the leading labels.
1493 3) Cleanup all useless labels. */
1496 cleanup_dead_labels (void)
1499 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1501 /* Find a suitable label for each block. We use the first user-defined
1502 label if there is one, or otherwise just the first label we see. */
1503 FOR_EACH_BB_FN (bb
, cfun
)
1505 gimple_stmt_iterator i
;
1507 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1510 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1515 label
= gimple_label_label (label_stmt
);
1517 /* If we have not yet seen a label for the current block,
1518 remember this one and see if there are more labels. */
1519 if (!label_for_bb
[bb
->index
].label
)
1521 label_for_bb
[bb
->index
].label
= label
;
1525 /* If we did see a label for the current block already, but it
1526 is an artificially created label, replace it if the current
1527 label is a user defined label. */
1528 if (!DECL_ARTIFICIAL (label
)
1529 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1531 label_for_bb
[bb
->index
].label
= label
;
1537 /* Now redirect all jumps/branches to the selected label.
1538 First do so for each block ending in a control statement. */
1539 FOR_EACH_BB_FN (bb
, cfun
)
1541 gimple
*stmt
= last_stmt (bb
);
1542 tree label
, new_label
;
1547 switch (gimple_code (stmt
))
1551 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
1552 label
= gimple_cond_true_label (cond_stmt
);
1555 new_label
= main_block_label (label
);
1556 if (new_label
!= label
)
1557 gimple_cond_set_true_label (cond_stmt
, new_label
);
1560 label
= gimple_cond_false_label (cond_stmt
);
1563 new_label
= main_block_label (label
);
1564 if (new_label
!= label
)
1565 gimple_cond_set_false_label (cond_stmt
, new_label
);
1572 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
1573 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
1575 /* Replace all destination labels. */
1576 for (i
= 0; i
< n
; ++i
)
1578 tree case_label
= gimple_switch_label (switch_stmt
, i
);
1579 label
= CASE_LABEL (case_label
);
1580 new_label
= main_block_label (label
);
1581 if (new_label
!= label
)
1582 CASE_LABEL (case_label
) = new_label
;
1589 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1590 int i
, n
= gimple_asm_nlabels (asm_stmt
);
1592 for (i
= 0; i
< n
; ++i
)
1594 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
1595 tree label
= main_block_label (TREE_VALUE (cons
));
1596 TREE_VALUE (cons
) = label
;
1601 /* We have to handle gotos until they're removed, and we don't
1602 remove them until after we've created the CFG edges. */
1604 if (!computed_goto_p (stmt
))
1606 ggoto
*goto_stmt
= as_a
<ggoto
*> (stmt
);
1607 label
= gimple_goto_dest (goto_stmt
);
1608 new_label
= main_block_label (label
);
1609 if (new_label
!= label
)
1610 gimple_goto_set_dest (goto_stmt
, new_label
);
1614 case GIMPLE_TRANSACTION
:
1616 gtransaction
*txn
= as_a
<gtransaction
*> (stmt
);
1618 label
= gimple_transaction_label_norm (txn
);
1621 new_label
= main_block_label (label
);
1622 if (new_label
!= label
)
1623 gimple_transaction_set_label_norm (txn
, new_label
);
1626 label
= gimple_transaction_label_uninst (txn
);
1629 new_label
= main_block_label (label
);
1630 if (new_label
!= label
)
1631 gimple_transaction_set_label_uninst (txn
, new_label
);
1634 label
= gimple_transaction_label_over (txn
);
1637 new_label
= main_block_label (label
);
1638 if (new_label
!= label
)
1639 gimple_transaction_set_label_over (txn
, new_label
);
1649 /* Do the same for the exception region tree labels. */
1650 cleanup_dead_labels_eh ();
1652 /* Finally, purge dead labels. All user-defined labels and labels that
1653 can be the target of non-local gotos and labels which have their
1654 address taken are preserved. */
1655 FOR_EACH_BB_FN (bb
, cfun
)
1657 gimple_stmt_iterator i
;
1658 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1660 if (!label_for_this_bb
)
1663 /* If the main label of the block is unused, we may still remove it. */
1664 if (!label_for_bb
[bb
->index
].used
)
1665 label_for_this_bb
= NULL
;
1667 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1670 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1675 label
= gimple_label_label (label_stmt
);
1677 if (label
== label_for_this_bb
1678 || !DECL_ARTIFICIAL (label
)
1679 || DECL_NONLOCAL (label
)
1680 || FORCED_LABEL (label
))
1683 gsi_remove (&i
, true);
1687 free (label_for_bb
);
1690 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1691 the ones jumping to the same label.
1692 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1695 group_case_labels_stmt (gswitch
*stmt
)
1697 int old_size
= gimple_switch_num_labels (stmt
);
1698 int i
, next_index
, new_size
;
1699 basic_block default_bb
= NULL
;
1701 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1703 /* Look for possible opportunities to merge cases. */
1705 while (i
< old_size
)
1707 tree base_case
, base_high
;
1708 basic_block base_bb
;
1710 base_case
= gimple_switch_label (stmt
, i
);
1712 gcc_assert (base_case
);
1713 base_bb
= label_to_block (CASE_LABEL (base_case
));
1715 /* Discard cases that have the same destination as the default case or
1716 whose destiniation blocks have already been removed as unreachable. */
1717 if (base_bb
== NULL
|| base_bb
== default_bb
)
1723 base_high
= CASE_HIGH (base_case
)
1724 ? CASE_HIGH (base_case
)
1725 : CASE_LOW (base_case
);
1728 /* Try to merge case labels. Break out when we reach the end
1729 of the label vector or when we cannot merge the next case
1730 label with the current one. */
1731 while (next_index
< old_size
)
1733 tree merge_case
= gimple_switch_label (stmt
, next_index
);
1734 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1735 wide_int bhp1
= wi::to_wide (base_high
) + 1;
1737 /* Merge the cases if they jump to the same place,
1738 and their ranges are consecutive. */
1739 if (merge_bb
== base_bb
1740 && wi::to_wide (CASE_LOW (merge_case
)) == bhp1
)
1742 base_high
= CASE_HIGH (merge_case
) ?
1743 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1744 CASE_HIGH (base_case
) = base_high
;
1751 /* Discard cases that have an unreachable destination block. */
1752 if (EDGE_COUNT (base_bb
->succs
) == 0
1753 && gimple_seq_unreachable_p (bb_seq (base_bb
))
1754 /* Don't optimize this if __builtin_unreachable () is the
1755 implicitly added one by the C++ FE too early, before
1756 -Wreturn-type can be diagnosed. We'll optimize it later
1757 during switchconv pass or any other cfg cleanup. */
1758 && (gimple_in_ssa_p (cfun
)
1759 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb
)))
1760 != BUILTINS_LOCATION
)))
1762 edge base_edge
= find_edge (gimple_bb (stmt
), base_bb
);
1763 if (base_edge
!= NULL
)
1764 remove_edge_and_dominated_blocks (base_edge
);
1770 gimple_switch_set_label (stmt
, new_size
,
1771 gimple_switch_label (stmt
, i
));
1776 gcc_assert (new_size
<= old_size
);
1778 if (new_size
< old_size
)
1779 gimple_switch_set_num_labels (stmt
, new_size
);
1781 return new_size
< old_size
;
1784 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1785 and scan the sorted vector of cases. Combine the ones jumping to the
1789 group_case_labels (void)
1792 bool changed
= false;
1794 FOR_EACH_BB_FN (bb
, cfun
)
1796 gimple
*stmt
= last_stmt (bb
);
1797 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1798 changed
|= group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1804 /* Checks whether we can merge block B into block A. */
1807 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1811 if (!single_succ_p (a
))
1814 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1817 if (single_succ (a
) != b
)
1820 if (!single_pred_p (b
))
1823 if (a
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
1824 || b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1827 /* If A ends by a statement causing exceptions or something similar, we
1828 cannot merge the blocks. */
1829 stmt
= last_stmt (a
);
1830 if (stmt
&& stmt_ends_bb_p (stmt
))
1833 /* Do not allow a block with only a non-local label to be merged. */
1835 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1836 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
1839 /* Examine the labels at the beginning of B. */
1840 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);
1844 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1847 lab
= gimple_label_label (label_stmt
);
1849 /* Do not remove user forced labels or for -O0 any user labels. */
1850 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1854 /* Protect simple loop latches. We only want to avoid merging
1855 the latch with the loop header or with a block in another
1856 loop in this case. */
1858 && b
->loop_father
->latch
== b
1859 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES
)
1860 && (b
->loop_father
->header
== a
1861 || b
->loop_father
!= a
->loop_father
))
1864 /* It must be possible to eliminate all phi nodes in B. If ssa form
1865 is not up-to-date and a name-mapping is registered, we cannot eliminate
1866 any phis. Symbols marked for renaming are never a problem though. */
1867 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);
1870 gphi
*phi
= gsi
.phi ();
1871 /* Technically only new names matter. */
1872 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1876 /* When not optimizing, don't merge if we'd lose goto_locus. */
1878 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1880 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1881 gimple_stmt_iterator prev
, next
;
1882 prev
= gsi_last_nondebug_bb (a
);
1883 next
= gsi_after_labels (b
);
1884 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1885 gsi_next_nondebug (&next
);
1886 if ((gsi_end_p (prev
)
1887 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1888 && (gsi_end_p (next
)
1889 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1896 /* Replaces all uses of NAME by VAL. */
1899 replace_uses_by (tree name
, tree val
)
1901 imm_use_iterator imm_iter
;
1906 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1908 /* Mark the block if we change the last stmt in it. */
1909 if (cfgcleanup_altered_bbs
1910 && stmt_ends_bb_p (stmt
))
1911 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1913 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1915 replace_exp (use
, val
);
1917 if (gimple_code (stmt
) == GIMPLE_PHI
)
1919 e
= gimple_phi_arg_edge (as_a
<gphi
*> (stmt
),
1920 PHI_ARG_INDEX_FROM_USE (use
));
1921 if (e
->flags
& EDGE_ABNORMAL
1922 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
))
1924 /* This can only occur for virtual operands, since
1925 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1926 would prevent replacement. */
1927 gcc_checking_assert (virtual_operand_p (name
));
1928 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1933 if (gimple_code (stmt
) != GIMPLE_PHI
)
1935 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1936 gimple
*orig_stmt
= stmt
;
1939 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1940 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1941 only change sth from non-invariant to invariant, and only
1942 when propagating constants. */
1943 if (is_gimple_min_invariant (val
))
1944 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1946 tree op
= gimple_op (stmt
, i
);
1947 /* Operands may be empty here. For example, the labels
1948 of a GIMPLE_COND are nulled out following the creation
1949 of the corresponding CFG edges. */
1950 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1951 recompute_tree_invariant_for_addr_expr (op
);
1954 if (fold_stmt (&gsi
))
1955 stmt
= gsi_stmt (gsi
);
1957 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1958 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1964 gcc_checking_assert (has_zero_uses (name
));
1966 /* Also update the trees stored in loop structures. */
1971 FOR_EACH_LOOP (loop
, 0)
1973 substitute_in_loop_info (loop
, name
, val
);
1978 /* Merge block B into block A. */
1981 gimple_merge_blocks (basic_block a
, basic_block b
)
1983 gimple_stmt_iterator last
, gsi
;
1987 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1989 /* Remove all single-valued PHI nodes from block B of the form
1990 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1991 gsi
= gsi_last_bb (a
);
1992 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1994 gimple
*phi
= gsi_stmt (psi
);
1995 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1997 bool may_replace_uses
= (virtual_operand_p (def
)
1998 || may_propagate_copy (def
, use
));
2000 /* In case we maintain loop closed ssa form, do not propagate arguments
2001 of loop exit phi nodes. */
2003 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
2004 && !virtual_operand_p (def
)
2005 && TREE_CODE (use
) == SSA_NAME
2006 && a
->loop_father
!= b
->loop_father
)
2007 may_replace_uses
= false;
2009 if (!may_replace_uses
)
2011 gcc_assert (!virtual_operand_p (def
));
2013 /* Note that just emitting the copies is fine -- there is no problem
2014 with ordering of phi nodes. This is because A is the single
2015 predecessor of B, therefore results of the phi nodes cannot
2016 appear as arguments of the phi nodes. */
2017 copy
= gimple_build_assign (def
, use
);
2018 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
2019 remove_phi_node (&psi
, false);
2023 /* If we deal with a PHI for virtual operands, we can simply
2024 propagate these without fussing with folding or updating
2026 if (virtual_operand_p (def
))
2028 imm_use_iterator iter
;
2029 use_operand_p use_p
;
2032 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
2033 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
2034 SET_USE (use_p
, use
);
2036 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
2037 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
2040 replace_uses_by (def
, use
);
2042 remove_phi_node (&psi
, true);
2046 /* Ensure that B follows A. */
2047 move_block_after (b
, a
);
2049 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
2050 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
2052 /* Remove labels from B and set gimple_bb to A for other statements. */
2053 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
2055 gimple
*stmt
= gsi_stmt (gsi
);
2056 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2058 tree label
= gimple_label_label (label_stmt
);
2061 gsi_remove (&gsi
, false);
2063 /* Now that we can thread computed gotos, we might have
2064 a situation where we have a forced label in block B
2065 However, the label at the start of block B might still be
2066 used in other ways (think about the runtime checking for
2067 Fortran assigned gotos). So we can not just delete the
2068 label. Instead we move the label to the start of block A. */
2069 if (FORCED_LABEL (label
))
2071 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
2072 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
2074 /* Other user labels keep around in a form of a debug stmt. */
2075 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
2077 gimple
*dbg
= gimple_build_debug_bind (label
,
2080 gimple_debug_bind_reset_value (dbg
);
2081 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
2084 lp_nr
= EH_LANDING_PAD_NR (label
);
2087 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
2088 lp
->post_landing_pad
= NULL
;
2093 gimple_set_bb (stmt
, a
);
2098 /* When merging two BBs, if their counts are different, the larger count
2099 is selected as the new bb count. This is to handle inconsistent
2101 if (a
->loop_father
== b
->loop_father
)
2103 a
->count
= a
->count
.merge (b
->count
);
2106 /* Merge the sequences. */
2107 last
= gsi_last_bb (a
);
2108 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
2109 set_bb_seq (b
, NULL
);
2111 if (cfgcleanup_altered_bbs
)
2112 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
2116 /* Return the one of two successors of BB that is not reachable by a
2117 complex edge, if there is one. Else, return BB. We use
2118 this in optimizations that use post-dominators for their heuristics,
2119 to catch the cases in C++ where function calls are involved. */
2122 single_noncomplex_succ (basic_block bb
)
2125 if (EDGE_COUNT (bb
->succs
) != 2)
2128 e0
= EDGE_SUCC (bb
, 0);
2129 e1
= EDGE_SUCC (bb
, 1);
2130 if (e0
->flags
& EDGE_COMPLEX
)
2132 if (e1
->flags
& EDGE_COMPLEX
)
2138 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2141 notice_special_calls (gcall
*call
)
2143 int flags
= gimple_call_flags (call
);
2145 if (flags
& ECF_MAY_BE_ALLOCA
)
2146 cfun
->calls_alloca
= true;
2147 if (flags
& ECF_RETURNS_TWICE
)
2148 cfun
->calls_setjmp
= true;
2152 /* Clear flags set by notice_special_calls. Used by dead code removal
2153 to update the flags. */
2156 clear_special_calls (void)
2158 cfun
->calls_alloca
= false;
2159 cfun
->calls_setjmp
= false;
2162 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2165 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
2167 /* Since this block is no longer reachable, we can just delete all
2168 of its PHI nodes. */
2169 remove_phi_nodes (bb
);
2171 /* Remove edges to BB's successors. */
2172 while (EDGE_COUNT (bb
->succs
) > 0)
2173 remove_edge (EDGE_SUCC (bb
, 0));
2177 /* Remove statements of basic block BB. */
2180 remove_bb (basic_block bb
)
2182 gimple_stmt_iterator i
;
2186 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
2187 if (dump_flags
& TDF_DETAILS
)
2189 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
2190 fprintf (dump_file
, "\n");
2196 struct loop
*loop
= bb
->loop_father
;
2198 /* If a loop gets removed, clean up the information associated
2200 if (loop
->latch
== bb
2201 || loop
->header
== bb
)
2202 free_numbers_of_iterations_estimates (loop
);
2205 /* Remove all the instructions in the block. */
2206 if (bb_seq (bb
) != NULL
)
2208 /* Walk backwards so as to get a chance to substitute all
2209 released DEFs into debug stmts. See
2210 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2212 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2214 gimple
*stmt
= gsi_stmt (i
);
2215 glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
);
2217 && (FORCED_LABEL (gimple_label_label (label_stmt
))
2218 || DECL_NONLOCAL (gimple_label_label (label_stmt
))))
2221 gimple_stmt_iterator new_gsi
;
2223 /* A non-reachable non-local label may still be referenced.
2224 But it no longer needs to carry the extra semantics of
2226 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
2228 DECL_NONLOCAL (gimple_label_label (label_stmt
)) = 0;
2229 FORCED_LABEL (gimple_label_label (label_stmt
)) = 1;
2232 new_bb
= bb
->prev_bb
;
2233 new_gsi
= gsi_start_bb (new_bb
);
2234 gsi_remove (&i
, false);
2235 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2239 /* Release SSA definitions. */
2240 release_defs (stmt
);
2241 gsi_remove (&i
, true);
2245 i
= gsi_last_bb (bb
);
2251 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2252 bb
->il
.gimple
.seq
= NULL
;
2253 bb
->il
.gimple
.phi_nodes
= NULL
;
2257 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2258 predicate VAL, return the edge that will be taken out of the block.
2259 If VAL does not match a unique edge, NULL is returned. */
2262 find_taken_edge (basic_block bb
, tree val
)
2266 stmt
= last_stmt (bb
);
2268 gcc_assert (is_ctrl_stmt (stmt
));
2270 if (gimple_code (stmt
) == GIMPLE_COND
)
2271 return find_taken_edge_cond_expr (bb
, val
);
2273 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2274 return find_taken_edge_switch_expr (as_a
<gswitch
*> (stmt
), bb
, val
);
2276 if (computed_goto_p (stmt
))
2278 /* Only optimize if the argument is a label, if the argument is
2279 not a label then we can not construct a proper CFG.
2281 It may be the case that we only need to allow the LABEL_REF to
2282 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2283 appear inside a LABEL_EXPR just to be safe. */
2285 && (TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2286 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2287 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2294 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2295 statement, determine which of the outgoing edges will be taken out of the
2296 block. Return NULL if either edge may be taken. */
2299 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2304 dest
= label_to_block (val
);
2307 e
= find_edge (bb
, dest
);
2308 gcc_assert (e
!= NULL
);
2314 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2315 statement, determine which of the two edges will be taken out of the
2316 block. Return NULL if either edge may be taken. */
2319 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2321 edge true_edge
, false_edge
;
2324 || TREE_CODE (val
) != INTEGER_CST
)
2327 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2329 return (integer_zerop (val
) ? false_edge
: true_edge
);
2332 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2333 statement, determine which edge will be taken out of the block. Return
2334 NULL if any edge may be taken. */
2337 find_taken_edge_switch_expr (gswitch
*switch_stmt
, basic_block bb
,
2340 basic_block dest_bb
;
2344 if (gimple_switch_num_labels (switch_stmt
) == 1)
2345 taken_case
= gimple_switch_default_label (switch_stmt
);
2346 else if (! val
|| TREE_CODE (val
) != INTEGER_CST
)
2349 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2350 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2352 e
= find_edge (bb
, dest_bb
);
2358 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2359 We can make optimal use here of the fact that the case labels are
2360 sorted: We can do a binary search for a case matching VAL. */
2363 find_case_label_for_value (gswitch
*switch_stmt
, tree val
)
2365 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2366 tree default_case
= gimple_switch_default_label (switch_stmt
);
2368 for (low
= 0, high
= n
; high
- low
> 1; )
2370 size_t i
= (high
+ low
) / 2;
2371 tree t
= gimple_switch_label (switch_stmt
, i
);
2374 /* Cache the result of comparing CASE_LOW and val. */
2375 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2382 if (CASE_HIGH (t
) == NULL
)
2384 /* A singe-valued case label. */
2390 /* A case range. We can only handle integer ranges. */
2391 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2396 return default_case
;
2400 /* Dump a basic block on stderr. */
2403 gimple_debug_bb (basic_block bb
)
2405 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2409 /* Dump basic block with index N on stderr. */
2412 gimple_debug_bb_n (int n
)
2414 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2415 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2419 /* Dump the CFG on stderr.
2421 FLAGS are the same used by the tree dumping functions
2422 (see TDF_* in dumpfile.h). */
2425 gimple_debug_cfg (dump_flags_t flags
)
2427 gimple_dump_cfg (stderr
, flags
);
2431 /* Dump the program showing basic block boundaries on the given FILE.
2433 FLAGS are the same used by the tree dumping functions (see TDF_* in
2437 gimple_dump_cfg (FILE *file
, dump_flags_t flags
)
2439 if (flags
& TDF_DETAILS
)
2441 dump_function_header (file
, current_function_decl
, flags
);
2442 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2443 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2444 last_basic_block_for_fn (cfun
));
2446 brief_dump_cfg (file
, flags
);
2447 fprintf (file
, "\n");
2450 if (flags
& TDF_STATS
)
2451 dump_cfg_stats (file
);
2453 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2457 /* Dump CFG statistics on FILE. */
2460 dump_cfg_stats (FILE *file
)
2462 static long max_num_merged_labels
= 0;
2463 unsigned long size
, total
= 0;
2466 const char * const fmt_str
= "%-30s%-13s%12s\n";
2467 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2468 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2469 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2470 const char *funcname
= current_function_name ();
2472 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2474 fprintf (file
, "---------------------------------------------------------\n");
2475 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2476 fprintf (file
, fmt_str
, "", " instances ", "used ");
2477 fprintf (file
, "---------------------------------------------------------\n");
2479 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2481 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2482 SCALE (size
), LABEL (size
));
2485 FOR_EACH_BB_FN (bb
, cfun
)
2486 num_edges
+= EDGE_COUNT (bb
->succs
);
2487 size
= num_edges
* sizeof (struct edge_def
);
2489 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2491 fprintf (file
, "---------------------------------------------------------\n");
2492 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2494 fprintf (file
, "---------------------------------------------------------\n");
2495 fprintf (file
, "\n");
2497 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2498 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2500 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2501 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2503 fprintf (file
, "\n");
2507 /* Dump CFG statistics on stderr. Keep extern so that it's always
2508 linked in the final executable. */
2511 debug_cfg_stats (void)
2513 dump_cfg_stats (stderr
);
2516 /*---------------------------------------------------------------------------
2517 Miscellaneous helpers
2518 ---------------------------------------------------------------------------*/
2520 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2521 flow. Transfers of control flow associated with EH are excluded. */
2524 call_can_make_abnormal_goto (gimple
*t
)
2526 /* If the function has no non-local labels, then a call cannot make an
2527 abnormal transfer of control. */
2528 if (!cfun
->has_nonlocal_label
2529 && !cfun
->calls_setjmp
)
2532 /* Likewise if the call has no side effects. */
2533 if (!gimple_has_side_effects (t
))
2536 /* Likewise if the called function is leaf. */
2537 if (gimple_call_flags (t
) & ECF_LEAF
)
2544 /* Return true if T can make an abnormal transfer of control flow.
2545 Transfers of control flow associated with EH are excluded. */
2548 stmt_can_make_abnormal_goto (gimple
*t
)
2550 if (computed_goto_p (t
))
2552 if (is_gimple_call (t
))
2553 return call_can_make_abnormal_goto (t
);
2558 /* Return true if T represents a stmt that always transfers control. */
2561 is_ctrl_stmt (gimple
*t
)
2563 switch (gimple_code (t
))
2577 /* Return true if T is a statement that may alter the flow of control
2578 (e.g., a call to a non-returning function). */
2581 is_ctrl_altering_stmt (gimple
*t
)
2585 switch (gimple_code (t
))
2588 /* Per stmt call flag indicates whether the call could alter
2590 if (gimple_call_ctrl_altering_p (t
))
2594 case GIMPLE_EH_DISPATCH
:
2595 /* EH_DISPATCH branches to the individual catch handlers at
2596 this level of a try or allowed-exceptions region. It can
2597 fallthru to the next statement as well. */
2601 if (gimple_asm_nlabels (as_a
<gasm
*> (t
)) > 0)
2606 /* OpenMP directives alter control flow. */
2609 case GIMPLE_TRANSACTION
:
2610 /* A transaction start alters control flow. */
2617 /* If a statement can throw, it alters control flow. */
2618 return stmt_can_throw_internal (t
);
2622 /* Return true if T is a simple local goto. */
2625 simple_goto_p (gimple
*t
)
2627 return (gimple_code (t
) == GIMPLE_GOTO
2628 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2632 /* Return true if STMT should start a new basic block. PREV_STMT is
2633 the statement preceding STMT. It is used when STMT is a label or a
2634 case label. Labels should only start a new basic block if their
2635 previous statement wasn't a label. Otherwise, sequence of labels
2636 would generate unnecessary basic blocks that only contain a single
2640 stmt_starts_bb_p (gimple
*stmt
, gimple
*prev_stmt
)
2645 /* Labels start a new basic block only if the preceding statement
2646 wasn't a label of the same type. This prevents the creation of
2647 consecutive blocks that have nothing but a single label. */
2648 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2650 /* Nonlocal and computed GOTO targets always start a new block. */
2651 if (DECL_NONLOCAL (gimple_label_label (label_stmt
))
2652 || FORCED_LABEL (gimple_label_label (label_stmt
)))
2655 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2657 if (DECL_NONLOCAL (gimple_label_label (
2658 as_a
<glabel
*> (prev_stmt
))))
2661 cfg_stats
.num_merged_labels
++;
2667 else if (gimple_code (stmt
) == GIMPLE_CALL
)
2669 if (gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2670 /* setjmp acts similar to a nonlocal GOTO target and thus should
2671 start a new block. */
2673 if (gimple_call_internal_p (stmt
, IFN_PHI
)
2675 && gimple_code (prev_stmt
) != GIMPLE_LABEL
2676 && (gimple_code (prev_stmt
) != GIMPLE_CALL
2677 || ! gimple_call_internal_p (prev_stmt
, IFN_PHI
)))
2678 /* PHI nodes start a new block unless preceeded by a label
2687 /* Return true if T should end a basic block. */
2690 stmt_ends_bb_p (gimple
*t
)
2692 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2695 /* Remove block annotations and other data structures. */
2698 delete_tree_cfg_annotations (struct function
*fn
)
2700 vec_free (label_to_block_map_for_fn (fn
));
2703 /* Return the virtual phi in BB. */
2706 get_virtual_phi (basic_block bb
)
2708 for (gphi_iterator gsi
= gsi_start_phis (bb
);
2712 gphi
*phi
= gsi
.phi ();
2714 if (virtual_operand_p (PHI_RESULT (phi
)))
2721 /* Return the first statement in basic block BB. */
2724 first_stmt (basic_block bb
)
2726 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2727 gimple
*stmt
= NULL
;
2729 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2737 /* Return the first non-label statement in basic block BB. */
2740 first_non_label_stmt (basic_block bb
)
2742 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2743 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2745 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2748 /* Return the last statement in basic block BB. */
2751 last_stmt (basic_block bb
)
2753 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2754 gimple
*stmt
= NULL
;
2756 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2764 /* Return the last statement of an otherwise empty block. Return NULL
2765 if the block is totally empty, or if it contains more than one
2769 last_and_only_stmt (basic_block bb
)
2771 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2772 gimple
*last
, *prev
;
2777 last
= gsi_stmt (i
);
2778 gsi_prev_nondebug (&i
);
2782 /* Empty statements should no longer appear in the instruction stream.
2783 Everything that might have appeared before should be deleted by
2784 remove_useless_stmts, and the optimizers should just gsi_remove
2785 instead of smashing with build_empty_stmt.
2787 Thus the only thing that should appear here in a block containing
2788 one executable statement is a label. */
2789 prev
= gsi_stmt (i
);
2790 if (gimple_code (prev
) == GIMPLE_LABEL
)
2796 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2799 reinstall_phi_args (edge new_edge
, edge old_edge
)
2805 vec
<edge_var_map
> *v
= redirect_edge_var_map_vector (old_edge
);
2809 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2810 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2811 i
++, gsi_next (&phis
))
2813 gphi
*phi
= phis
.phi ();
2814 tree result
= redirect_edge_var_map_result (vm
);
2815 tree arg
= redirect_edge_var_map_def (vm
);
2817 gcc_assert (result
== gimple_phi_result (phi
));
2819 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2822 redirect_edge_var_map_clear (old_edge
);
2825 /* Returns the basic block after which the new basic block created
2826 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2827 near its "logical" location. This is of most help to humans looking
2828 at debugging dumps. */
2831 split_edge_bb_loc (edge edge_in
)
2833 basic_block dest
= edge_in
->dest
;
2834 basic_block dest_prev
= dest
->prev_bb
;
2838 edge e
= find_edge (dest_prev
, dest
);
2839 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2840 return edge_in
->src
;
2845 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2846 Abort on abnormal edges. */
2849 gimple_split_edge (edge edge_in
)
2851 basic_block new_bb
, after_bb
, dest
;
2854 /* Abnormal edges cannot be split. */
2855 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2857 dest
= edge_in
->dest
;
2859 after_bb
= split_edge_bb_loc (edge_in
);
2861 new_bb
= create_empty_bb (after_bb
);
2862 new_bb
->count
= edge_in
->count ();
2864 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2865 gcc_assert (e
== edge_in
);
2867 new_edge
= make_single_succ_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2868 reinstall_phi_args (new_edge
, e
);
2874 /* Verify properties of the address expression T with base object BASE. */
2877 verify_address (tree t
, tree base
)
2880 bool old_side_effects
;
2882 bool new_side_effects
;
2884 old_constant
= TREE_CONSTANT (t
);
2885 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2887 recompute_tree_invariant_for_addr_expr (t
);
2888 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2889 new_constant
= TREE_CONSTANT (t
);
2891 if (old_constant
!= new_constant
)
2893 error ("constant not recomputed when ADDR_EXPR changed");
2896 if (old_side_effects
!= new_side_effects
)
2898 error ("side effects not recomputed when ADDR_EXPR changed");
2903 || TREE_CODE (base
) == PARM_DECL
2904 || TREE_CODE (base
) == RESULT_DECL
))
2907 if (DECL_GIMPLE_REG_P (base
))
2909 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2916 /* Callback for walk_tree, check that all elements with address taken are
2917 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2918 inside a PHI node. */
2921 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2928 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2929 #define CHECK_OP(N, MSG) \
2930 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2931 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2933 switch (TREE_CODE (t
))
2936 if (SSA_NAME_IN_FREE_LIST (t
))
2938 error ("SSA name in freelist but still referenced");
2947 tree context
= decl_function_context (t
);
2948 if (context
!= cfun
->decl
2949 && !SCOPE_FILE_SCOPE_P (context
)
2951 && !DECL_EXTERNAL (t
))
2953 error ("Local declaration from a different function");
2960 error ("INDIRECT_REF in gimple IL");
2964 x
= TREE_OPERAND (t
, 0);
2965 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2966 || !is_gimple_mem_ref_addr (x
))
2968 error ("invalid first operand of MEM_REF");
2971 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2972 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2974 error ("invalid offset operand of MEM_REF");
2975 return TREE_OPERAND (t
, 1);
2977 if (TREE_CODE (x
) == ADDR_EXPR
)
2979 tree va
= verify_address (x
, TREE_OPERAND (x
, 0));
2982 x
= TREE_OPERAND (x
, 0);
2984 walk_tree (&x
, verify_expr
, data
, NULL
);
2989 x
= fold (ASSERT_EXPR_COND (t
));
2990 if (x
== boolean_false_node
)
2992 error ("ASSERT_EXPR with an always-false condition");
2998 error ("MODIFY_EXPR not expected while having tuples");
3005 gcc_assert (is_gimple_address (t
));
3007 /* Skip any references (they will be checked when we recurse down the
3008 tree) and ensure that any variable used as a prefix is marked
3010 for (x
= TREE_OPERAND (t
, 0);
3011 handled_component_p (x
);
3012 x
= TREE_OPERAND (x
, 0))
3015 if ((tem
= verify_address (t
, x
)))
3019 || TREE_CODE (x
) == PARM_DECL
3020 || TREE_CODE (x
) == RESULT_DECL
))
3023 if (!TREE_ADDRESSABLE (x
))
3025 error ("address taken, but ADDRESSABLE bit not set");
3033 x
= COND_EXPR_COND (t
);
3034 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
3036 error ("non-integral used in condition");
3039 if (!is_gimple_condexpr (x
))
3041 error ("invalid conditional operand");
3046 case NON_LVALUE_EXPR
:
3047 case TRUTH_NOT_EXPR
:
3051 case FIX_TRUNC_EXPR
:
3056 CHECK_OP (0, "invalid operand to unary operator");
3062 if (!is_gimple_reg_type (TREE_TYPE (t
)))
3064 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3068 if (TREE_CODE (t
) == BIT_FIELD_REF
)
3070 tree t0
= TREE_OPERAND (t
, 0);
3071 tree t1
= TREE_OPERAND (t
, 1);
3072 tree t2
= TREE_OPERAND (t
, 2);
3073 if (!tree_fits_uhwi_p (t1
)
3074 || !tree_fits_uhwi_p (t2
)
3075 || !types_compatible_p (bitsizetype
, TREE_TYPE (t1
))
3076 || !types_compatible_p (bitsizetype
, TREE_TYPE (t2
)))
3078 error ("invalid position or size operand to BIT_FIELD_REF");
3081 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
3082 && (TYPE_PRECISION (TREE_TYPE (t
))
3083 != tree_to_uhwi (t1
)))
3085 error ("integral result type precision does not match "
3086 "field size of BIT_FIELD_REF");
3089 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
3090 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
3091 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t
)))
3092 != tree_to_uhwi (t1
)))
3094 error ("mode size of non-integral result does not "
3095 "match field size of BIT_FIELD_REF");
3098 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
3099 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
3100 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
3102 error ("position plus size exceeds size of referenced object in "
3107 t
= TREE_OPERAND (t
, 0);
3112 case ARRAY_RANGE_REF
:
3113 case VIEW_CONVERT_EXPR
:
3114 /* We have a nest of references. Verify that each of the operands
3115 that determine where to reference is either a constant or a variable,
3116 verify that the base is valid, and then show we've already checked
3118 while (handled_component_p (t
))
3120 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
3121 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3122 else if (TREE_CODE (t
) == ARRAY_REF
3123 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
3125 CHECK_OP (1, "invalid array index");
3126 if (TREE_OPERAND (t
, 2))
3127 CHECK_OP (2, "invalid array lower bound");
3128 if (TREE_OPERAND (t
, 3))
3129 CHECK_OP (3, "invalid array stride");
3131 else if (TREE_CODE (t
) == BIT_FIELD_REF
3132 || TREE_CODE (t
) == REALPART_EXPR
3133 || TREE_CODE (t
) == IMAGPART_EXPR
)
3135 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3140 t
= TREE_OPERAND (t
, 0);
3143 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
3145 error ("invalid reference prefix");
3148 walk_tree (&t
, verify_expr
, data
, NULL
);
3153 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3154 POINTER_PLUS_EXPR. */
3155 if (POINTER_TYPE_P (TREE_TYPE (t
)))
3157 error ("invalid operand to plus/minus, type is a pointer");
3160 CHECK_OP (0, "invalid operand to binary operator");
3161 CHECK_OP (1, "invalid operand to binary operator");
3164 case POINTER_DIFF_EXPR
:
3165 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0)))
3166 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
3168 error ("invalid operand to pointer diff, operand is not a pointer");
3171 if (TREE_CODE (TREE_TYPE (t
)) != INTEGER_TYPE
3172 || TYPE_UNSIGNED (TREE_TYPE (t
))
3173 || (TYPE_PRECISION (TREE_TYPE (t
))
3174 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t
, 0)))))
3176 error ("invalid type for pointer diff");
3179 CHECK_OP (0, "invalid operand to pointer diff");
3180 CHECK_OP (1, "invalid operand to pointer diff");
3183 case POINTER_PLUS_EXPR
:
3184 /* Check to make sure the first operand is a pointer or reference type. */
3185 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
3187 error ("invalid operand to pointer plus, first operand is not a pointer");
3190 /* Check to make sure the second operand is a ptrofftype. */
3191 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
3193 error ("invalid operand to pointer plus, second operand is not an "
3194 "integer type of appropriate width");
3204 case UNORDERED_EXPR
:
3213 case TRUNC_DIV_EXPR
:
3215 case FLOOR_DIV_EXPR
:
3216 case ROUND_DIV_EXPR
:
3217 case TRUNC_MOD_EXPR
:
3219 case FLOOR_MOD_EXPR
:
3220 case ROUND_MOD_EXPR
:
3222 case EXACT_DIV_EXPR
:
3232 CHECK_OP (0, "invalid operand to binary operator");
3233 CHECK_OP (1, "invalid operand to binary operator");
3237 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
3241 case CASE_LABEL_EXPR
:
3244 error ("invalid CASE_CHAIN");
3258 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3259 Returns true if there is an error, otherwise false. */
3262 verify_types_in_gimple_min_lval (tree expr
)
3266 if (is_gimple_id (expr
))
3269 if (TREE_CODE (expr
) != TARGET_MEM_REF
3270 && TREE_CODE (expr
) != MEM_REF
)
3272 error ("invalid expression for min lvalue");
3276 /* TARGET_MEM_REFs are strange beasts. */
3277 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3280 op
= TREE_OPERAND (expr
, 0);
3281 if (!is_gimple_val (op
))
3283 error ("invalid operand in indirect reference");
3284 debug_generic_stmt (op
);
3287 /* Memory references now generally can involve a value conversion. */
3292 /* Verify if EXPR is a valid GIMPLE reference expression. If
3293 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3294 if there is an error, otherwise false. */
3297 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
3299 while (handled_component_p (expr
))
3301 tree op
= TREE_OPERAND (expr
, 0);
3303 if (TREE_CODE (expr
) == ARRAY_REF
3304 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3306 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3307 || (TREE_OPERAND (expr
, 2)
3308 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3309 || (TREE_OPERAND (expr
, 3)
3310 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3312 error ("invalid operands to array reference");
3313 debug_generic_stmt (expr
);
3318 /* Verify if the reference array element types are compatible. */
3319 if (TREE_CODE (expr
) == ARRAY_REF
3320 && !useless_type_conversion_p (TREE_TYPE (expr
),
3321 TREE_TYPE (TREE_TYPE (op
))))
3323 error ("type mismatch in array reference");
3324 debug_generic_stmt (TREE_TYPE (expr
));
3325 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3328 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3329 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3330 TREE_TYPE (TREE_TYPE (op
))))
3332 error ("type mismatch in array range reference");
3333 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3334 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3338 if ((TREE_CODE (expr
) == REALPART_EXPR
3339 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3340 && !useless_type_conversion_p (TREE_TYPE (expr
),
3341 TREE_TYPE (TREE_TYPE (op
))))
3343 error ("type mismatch in real/imagpart reference");
3344 debug_generic_stmt (TREE_TYPE (expr
));
3345 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3349 if (TREE_CODE (expr
) == COMPONENT_REF
3350 && !useless_type_conversion_p (TREE_TYPE (expr
),
3351 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3353 error ("type mismatch in component reference");
3354 debug_generic_stmt (TREE_TYPE (expr
));
3355 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3359 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3361 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3362 that their operand is not an SSA name or an invariant when
3363 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3364 bug). Otherwise there is nothing to verify, gross mismatches at
3365 most invoke undefined behavior. */
3367 && (TREE_CODE (op
) == SSA_NAME
3368 || is_gimple_min_invariant (op
)))
3370 error ("conversion of an SSA_NAME on the left hand side");
3371 debug_generic_stmt (expr
);
3374 else if (TREE_CODE (op
) == SSA_NAME
3375 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3377 error ("conversion of register to a different size");
3378 debug_generic_stmt (expr
);
3381 else if (!handled_component_p (op
))
3388 if (TREE_CODE (expr
) == MEM_REF
)
3390 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3392 error ("invalid address operand in MEM_REF");
3393 debug_generic_stmt (expr
);
3396 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3397 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3399 error ("invalid offset operand in MEM_REF");
3400 debug_generic_stmt (expr
);
3404 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3406 if (!TMR_BASE (expr
)
3407 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3409 error ("invalid address operand in TARGET_MEM_REF");
3412 if (!TMR_OFFSET (expr
)
3413 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3414 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3416 error ("invalid offset operand in TARGET_MEM_REF");
3417 debug_generic_stmt (expr
);
3422 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3423 && verify_types_in_gimple_min_lval (expr
));
3426 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3427 list of pointer-to types that is trivially convertible to DEST. */
3430 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3434 if (!TYPE_POINTER_TO (src_obj
))
3437 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3438 if (useless_type_conversion_p (dest
, src
))
3444 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3445 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3448 valid_fixed_convert_types_p (tree type1
, tree type2
)
3450 return (FIXED_POINT_TYPE_P (type1
)
3451 && (INTEGRAL_TYPE_P (type2
)
3452 || SCALAR_FLOAT_TYPE_P (type2
)
3453 || FIXED_POINT_TYPE_P (type2
)));
3456 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3457 is a problem, otherwise false. */
3460 verify_gimple_call (gcall
*stmt
)
3462 tree fn
= gimple_call_fn (stmt
);
3463 tree fntype
, fndecl
;
3466 if (gimple_call_internal_p (stmt
))
3470 error ("gimple call has two targets");
3471 debug_generic_stmt (fn
);
3474 /* FIXME : for passing label as arg in internal fn PHI from GIMPLE FE*/
3475 else if (gimple_call_internal_fn (stmt
) == IFN_PHI
)
3484 error ("gimple call has no target");
3489 if (fn
&& !is_gimple_call_addr (fn
))
3491 error ("invalid function in gimple call");
3492 debug_generic_stmt (fn
);
3497 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3498 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3499 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3501 error ("non-function in gimple call");
3505 fndecl
= gimple_call_fndecl (stmt
);
3507 && TREE_CODE (fndecl
) == FUNCTION_DECL
3508 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3509 && !DECL_PURE_P (fndecl
)
3510 && !TREE_READONLY (fndecl
))
3512 error ("invalid pure const state for function");
3516 tree lhs
= gimple_call_lhs (stmt
);
3518 && (!is_gimple_lvalue (lhs
)
3519 || verify_types_in_gimple_reference (lhs
, true)))
3521 error ("invalid LHS in gimple call");
3525 if (gimple_call_ctrl_altering_p (stmt
)
3526 && gimple_call_noreturn_p (stmt
)
3527 && should_remove_lhs_p (lhs
))
3529 error ("LHS in noreturn call");
3533 fntype
= gimple_call_fntype (stmt
);
3536 && !useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (fntype
))
3537 /* ??? At least C++ misses conversions at assignments from
3538 void * call results.
3539 For now simply allow arbitrary pointer type conversions. */
3540 && !(POINTER_TYPE_P (TREE_TYPE (lhs
))
3541 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3543 error ("invalid conversion in gimple call");
3544 debug_generic_stmt (TREE_TYPE (lhs
));
3545 debug_generic_stmt (TREE_TYPE (fntype
));
3549 if (gimple_call_chain (stmt
)
3550 && !is_gimple_val (gimple_call_chain (stmt
)))
3552 error ("invalid static chain in gimple call");
3553 debug_generic_stmt (gimple_call_chain (stmt
));
3557 /* If there is a static chain argument, the call should either be
3558 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3559 if (gimple_call_chain (stmt
)
3561 && !DECL_STATIC_CHAIN (fndecl
))
3563 error ("static chain with function that doesn%'t use one");
3567 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3569 switch (DECL_FUNCTION_CODE (fndecl
))
3571 case BUILT_IN_UNREACHABLE
:
3573 if (gimple_call_num_args (stmt
) > 0)
3575 /* Built-in unreachable with parameters might not be caught by
3576 undefined behavior sanitizer. Front-ends do check users do not
3577 call them that way but we also produce calls to
3578 __builtin_unreachable internally, for example when IPA figures
3579 out a call cannot happen in a legal program. In such cases,
3580 we must make sure arguments are stripped off. */
3581 error ("__builtin_unreachable or __builtin_trap call with "
3591 /* ??? The C frontend passes unpromoted arguments in case it
3592 didn't see a function declaration before the call. So for now
3593 leave the call arguments mostly unverified. Once we gimplify
3594 unit-at-a-time we have a chance to fix this. */
3596 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3598 tree arg
= gimple_call_arg (stmt
, i
);
3599 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3600 && !is_gimple_val (arg
))
3601 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3602 && !is_gimple_lvalue (arg
)))
3604 error ("invalid argument to gimple call");
3605 debug_generic_expr (arg
);
3613 /* Verifies the gimple comparison with the result type TYPE and
3614 the operands OP0 and OP1, comparison code is CODE. */
3617 verify_gimple_comparison (tree type
, tree op0
, tree op1
, enum tree_code code
)
3619 tree op0_type
= TREE_TYPE (op0
);
3620 tree op1_type
= TREE_TYPE (op1
);
3622 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3624 error ("invalid operands in gimple comparison");
3628 /* For comparisons we do not have the operations type as the
3629 effective type the comparison is carried out in. Instead
3630 we require that either the first operand is trivially
3631 convertible into the second, or the other way around.
3632 Because we special-case pointers to void we allow
3633 comparisons of pointers with the same mode as well. */
3634 if (!useless_type_conversion_p (op0_type
, op1_type
)
3635 && !useless_type_conversion_p (op1_type
, op0_type
)
3636 && (!POINTER_TYPE_P (op0_type
)
3637 || !POINTER_TYPE_P (op1_type
)
3638 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3640 error ("mismatching comparison operand types");
3641 debug_generic_expr (op0_type
);
3642 debug_generic_expr (op1_type
);
3646 /* The resulting type of a comparison may be an effective boolean type. */
3647 if (INTEGRAL_TYPE_P (type
)
3648 && (TREE_CODE (type
) == BOOLEAN_TYPE
3649 || TYPE_PRECISION (type
) == 1))
3651 if ((TREE_CODE (op0_type
) == VECTOR_TYPE
3652 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3653 && code
!= EQ_EXPR
&& code
!= NE_EXPR
3654 && !VECTOR_BOOLEAN_TYPE_P (op0_type
)
3655 && !VECTOR_INTEGER_TYPE_P (op0_type
))
3657 error ("unsupported operation or type for vector comparison"
3658 " returning a boolean");
3659 debug_generic_expr (op0_type
);
3660 debug_generic_expr (op1_type
);
3664 /* Or a boolean vector type with the same element count
3665 as the comparison operand types. */
3666 else if (TREE_CODE (type
) == VECTOR_TYPE
3667 && TREE_CODE (TREE_TYPE (type
)) == BOOLEAN_TYPE
)
3669 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3670 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3672 error ("non-vector operands in vector comparison");
3673 debug_generic_expr (op0_type
);
3674 debug_generic_expr (op1_type
);
3678 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
))
3680 error ("invalid vector comparison resulting type");
3681 debug_generic_expr (type
);
3687 error ("bogus comparison result type");
3688 debug_generic_expr (type
);
3695 /* Verify a gimple assignment statement STMT with an unary rhs.
3696 Returns true if anything is wrong. */
3699 verify_gimple_assign_unary (gassign
*stmt
)
3701 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3702 tree lhs
= gimple_assign_lhs (stmt
);
3703 tree lhs_type
= TREE_TYPE (lhs
);
3704 tree rhs1
= gimple_assign_rhs1 (stmt
);
3705 tree rhs1_type
= TREE_TYPE (rhs1
);
3707 if (!is_gimple_reg (lhs
))
3709 error ("non-register as LHS of unary operation");
3713 if (!is_gimple_val (rhs1
))
3715 error ("invalid operand in unary operation");
3719 /* First handle conversions. */
3724 /* Allow conversions from pointer type to integral type only if
3725 there is no sign or zero extension involved.
3726 For targets were the precision of ptrofftype doesn't match that
3727 of pointers we need to allow arbitrary conversions to ptrofftype. */
3728 if ((POINTER_TYPE_P (lhs_type
)
3729 && INTEGRAL_TYPE_P (rhs1_type
))
3730 || (POINTER_TYPE_P (rhs1_type
)
3731 && INTEGRAL_TYPE_P (lhs_type
)
3732 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3733 || ptrofftype_p (sizetype
))))
3736 /* Allow conversion from integral to offset type and vice versa. */
3737 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3738 && INTEGRAL_TYPE_P (rhs1_type
))
3739 || (INTEGRAL_TYPE_P (lhs_type
)
3740 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3743 /* Otherwise assert we are converting between types of the
3745 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3747 error ("invalid types in nop conversion");
3748 debug_generic_expr (lhs_type
);
3749 debug_generic_expr (rhs1_type
);
3756 case ADDR_SPACE_CONVERT_EXPR
:
3758 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3759 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3760 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3762 error ("invalid types in address space conversion");
3763 debug_generic_expr (lhs_type
);
3764 debug_generic_expr (rhs1_type
);
3771 case FIXED_CONVERT_EXPR
:
3773 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3774 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3776 error ("invalid types in fixed-point conversion");
3777 debug_generic_expr (lhs_type
);
3778 debug_generic_expr (rhs1_type
);
3787 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3788 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3789 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3791 error ("invalid types in conversion to floating point");
3792 debug_generic_expr (lhs_type
);
3793 debug_generic_expr (rhs1_type
);
3800 case FIX_TRUNC_EXPR
:
3802 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3803 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3804 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3806 error ("invalid types in conversion to integer");
3807 debug_generic_expr (lhs_type
);
3808 debug_generic_expr (rhs1_type
);
3815 case VEC_UNPACK_HI_EXPR
:
3816 case VEC_UNPACK_LO_EXPR
:
3817 case VEC_UNPACK_FLOAT_HI_EXPR
:
3818 case VEC_UNPACK_FLOAT_LO_EXPR
:
3833 /* For the remaining codes assert there is no conversion involved. */
3834 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3836 error ("non-trivial conversion in unary operation");
3837 debug_generic_expr (lhs_type
);
3838 debug_generic_expr (rhs1_type
);
3845 /* Verify a gimple assignment statement STMT with a binary rhs.
3846 Returns true if anything is wrong. */
3849 verify_gimple_assign_binary (gassign
*stmt
)
3851 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3852 tree lhs
= gimple_assign_lhs (stmt
);
3853 tree lhs_type
= TREE_TYPE (lhs
);
3854 tree rhs1
= gimple_assign_rhs1 (stmt
);
3855 tree rhs1_type
= TREE_TYPE (rhs1
);
3856 tree rhs2
= gimple_assign_rhs2 (stmt
);
3857 tree rhs2_type
= TREE_TYPE (rhs2
);
3859 if (!is_gimple_reg (lhs
))
3861 error ("non-register as LHS of binary operation");
3865 if (!is_gimple_val (rhs1
)
3866 || !is_gimple_val (rhs2
))
3868 error ("invalid operands in binary operation");
3872 /* First handle operations that involve different types. */
3877 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3878 || !(INTEGRAL_TYPE_P (rhs1_type
)
3879 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3880 || !(INTEGRAL_TYPE_P (rhs2_type
)
3881 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3883 error ("type mismatch in complex expression");
3884 debug_generic_expr (lhs_type
);
3885 debug_generic_expr (rhs1_type
);
3886 debug_generic_expr (rhs2_type
);
3898 /* Shifts and rotates are ok on integral types, fixed point
3899 types and integer vector types. */
3900 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3901 && !FIXED_POINT_TYPE_P (rhs1_type
)
3902 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3903 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3904 || (!INTEGRAL_TYPE_P (rhs2_type
)
3905 /* Vector shifts of vectors are also ok. */
3906 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3907 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3908 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3909 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3910 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3912 error ("type mismatch in shift expression");
3913 debug_generic_expr (lhs_type
);
3914 debug_generic_expr (rhs1_type
);
3915 debug_generic_expr (rhs2_type
);
3922 case WIDEN_LSHIFT_EXPR
:
3924 if (!INTEGRAL_TYPE_P (lhs_type
)
3925 || !INTEGRAL_TYPE_P (rhs1_type
)
3926 || TREE_CODE (rhs2
) != INTEGER_CST
3927 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3929 error ("type mismatch in widening vector shift expression");
3930 debug_generic_expr (lhs_type
);
3931 debug_generic_expr (rhs1_type
);
3932 debug_generic_expr (rhs2_type
);
3939 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3940 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3942 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3943 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3944 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3945 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3946 || TREE_CODE (rhs2
) != INTEGER_CST
3947 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3948 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3950 error ("type mismatch in widening vector shift expression");
3951 debug_generic_expr (lhs_type
);
3952 debug_generic_expr (rhs1_type
);
3953 debug_generic_expr (rhs2_type
);
3963 tree lhs_etype
= lhs_type
;
3964 tree rhs1_etype
= rhs1_type
;
3965 tree rhs2_etype
= rhs2_type
;
3966 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3968 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3969 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3971 error ("invalid non-vector operands to vector valued plus");
3974 lhs_etype
= TREE_TYPE (lhs_type
);
3975 rhs1_etype
= TREE_TYPE (rhs1_type
);
3976 rhs2_etype
= TREE_TYPE (rhs2_type
);
3978 if (POINTER_TYPE_P (lhs_etype
)
3979 || POINTER_TYPE_P (rhs1_etype
)
3980 || POINTER_TYPE_P (rhs2_etype
))
3982 error ("invalid (pointer) operands to plus/minus");
3986 /* Continue with generic binary expression handling. */
3990 case POINTER_PLUS_EXPR
:
3992 if (!POINTER_TYPE_P (rhs1_type
)
3993 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3994 || !ptrofftype_p (rhs2_type
))
3996 error ("type mismatch in pointer plus expression");
3997 debug_generic_stmt (lhs_type
);
3998 debug_generic_stmt (rhs1_type
);
3999 debug_generic_stmt (rhs2_type
);
4006 case POINTER_DIFF_EXPR
:
4008 if (!POINTER_TYPE_P (rhs1_type
)
4009 || !POINTER_TYPE_P (rhs2_type
)
4010 || !types_compatible_p (rhs1_type
, rhs2_type
)
4011 || TREE_CODE (lhs_type
) != INTEGER_TYPE
4012 || TYPE_UNSIGNED (lhs_type
)
4013 || TYPE_PRECISION (lhs_type
) != TYPE_PRECISION (rhs1_type
))
4015 error ("type mismatch in pointer diff expression");
4016 debug_generic_stmt (lhs_type
);
4017 debug_generic_stmt (rhs1_type
);
4018 debug_generic_stmt (rhs2_type
);
4025 case TRUTH_ANDIF_EXPR
:
4026 case TRUTH_ORIF_EXPR
:
4027 case TRUTH_AND_EXPR
:
4029 case TRUTH_XOR_EXPR
:
4039 case UNORDERED_EXPR
:
4047 /* Comparisons are also binary, but the result type is not
4048 connected to the operand types. */
4049 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
, rhs_code
);
4051 case WIDEN_MULT_EXPR
:
4052 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
4054 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
4055 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
4057 case WIDEN_SUM_EXPR
:
4059 if (((TREE_CODE (rhs1_type
) != VECTOR_TYPE
4060 || TREE_CODE (lhs_type
) != VECTOR_TYPE
)
4061 && ((!INTEGRAL_TYPE_P (rhs1_type
)
4062 && !SCALAR_FLOAT_TYPE_P (rhs1_type
))
4063 || (!INTEGRAL_TYPE_P (lhs_type
)
4064 && !SCALAR_FLOAT_TYPE_P (lhs_type
))))
4065 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
4066 || (GET_MODE_SIZE (element_mode (rhs2_type
))
4067 < 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4069 error ("type mismatch in widening sum reduction");
4070 debug_generic_expr (lhs_type
);
4071 debug_generic_expr (rhs1_type
);
4072 debug_generic_expr (rhs2_type
);
4078 case VEC_WIDEN_MULT_HI_EXPR
:
4079 case VEC_WIDEN_MULT_LO_EXPR
:
4080 case VEC_WIDEN_MULT_EVEN_EXPR
:
4081 case VEC_WIDEN_MULT_ODD_EXPR
:
4083 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4084 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4085 || !types_compatible_p (rhs1_type
, rhs2_type
)
4086 || (GET_MODE_SIZE (element_mode (lhs_type
))
4087 != 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4089 error ("type mismatch in vector widening multiplication");
4090 debug_generic_expr (lhs_type
);
4091 debug_generic_expr (rhs1_type
);
4092 debug_generic_expr (rhs2_type
);
4098 case VEC_PACK_TRUNC_EXPR
:
4099 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4100 vector boolean types. */
4101 if (VECTOR_BOOLEAN_TYPE_P (lhs_type
)
4102 && VECTOR_BOOLEAN_TYPE_P (rhs1_type
)
4103 && types_compatible_p (rhs1_type
, rhs2_type
)
4104 && (TYPE_VECTOR_SUBPARTS (lhs_type
)
4105 == 2 * TYPE_VECTOR_SUBPARTS (rhs1_type
)))
4109 case VEC_PACK_SAT_EXPR
:
4110 case VEC_PACK_FIX_TRUNC_EXPR
:
4112 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4113 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4114 || !((rhs_code
== VEC_PACK_FIX_TRUNC_EXPR
4115 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
))
4116 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
)))
4117 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
4118 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))))
4119 || !types_compatible_p (rhs1_type
, rhs2_type
)
4120 || (GET_MODE_SIZE (element_mode (rhs1_type
))
4121 != 2 * GET_MODE_SIZE (element_mode (lhs_type
))))
4123 error ("type mismatch in vector pack expression");
4124 debug_generic_expr (lhs_type
);
4125 debug_generic_expr (rhs1_type
);
4126 debug_generic_expr (rhs2_type
);
4134 case MULT_HIGHPART_EXPR
:
4135 case TRUNC_DIV_EXPR
:
4137 case FLOOR_DIV_EXPR
:
4138 case ROUND_DIV_EXPR
:
4139 case TRUNC_MOD_EXPR
:
4141 case FLOOR_MOD_EXPR
:
4142 case ROUND_MOD_EXPR
:
4144 case EXACT_DIV_EXPR
:
4150 /* Continue with generic binary expression handling. */
4157 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4158 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4160 error ("type mismatch in binary expression");
4161 debug_generic_stmt (lhs_type
);
4162 debug_generic_stmt (rhs1_type
);
4163 debug_generic_stmt (rhs2_type
);
4170 /* Verify a gimple assignment statement STMT with a ternary rhs.
4171 Returns true if anything is wrong. */
4174 verify_gimple_assign_ternary (gassign
*stmt
)
4176 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4177 tree lhs
= gimple_assign_lhs (stmt
);
4178 tree lhs_type
= TREE_TYPE (lhs
);
4179 tree rhs1
= gimple_assign_rhs1 (stmt
);
4180 tree rhs1_type
= TREE_TYPE (rhs1
);
4181 tree rhs2
= gimple_assign_rhs2 (stmt
);
4182 tree rhs2_type
= TREE_TYPE (rhs2
);
4183 tree rhs3
= gimple_assign_rhs3 (stmt
);
4184 tree rhs3_type
= TREE_TYPE (rhs3
);
4186 if (!is_gimple_reg (lhs
))
4188 error ("non-register as LHS of ternary operation");
4192 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
4193 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
4194 || !is_gimple_val (rhs2
)
4195 || !is_gimple_val (rhs3
))
4197 error ("invalid operands in ternary operation");
4201 /* First handle operations that involve different types. */
4204 case WIDEN_MULT_PLUS_EXPR
:
4205 case WIDEN_MULT_MINUS_EXPR
:
4206 if ((!INTEGRAL_TYPE_P (rhs1_type
)
4207 && !FIXED_POINT_TYPE_P (rhs1_type
))
4208 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
4209 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4210 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
4211 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
4213 error ("type mismatch in widening multiply-accumulate expression");
4214 debug_generic_expr (lhs_type
);
4215 debug_generic_expr (rhs1_type
);
4216 debug_generic_expr (rhs2_type
);
4217 debug_generic_expr (rhs3_type
);
4223 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4224 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
4225 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4227 error ("type mismatch in fused multiply-add expression");
4228 debug_generic_expr (lhs_type
);
4229 debug_generic_expr (rhs1_type
);
4230 debug_generic_expr (rhs2_type
);
4231 debug_generic_expr (rhs3_type
);
4237 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type
)
4238 || TYPE_VECTOR_SUBPARTS (rhs1_type
)
4239 != TYPE_VECTOR_SUBPARTS (lhs_type
))
4241 error ("the first argument of a VEC_COND_EXPR must be of a "
4242 "boolean vector type of the same number of elements "
4244 debug_generic_expr (lhs_type
);
4245 debug_generic_expr (rhs1_type
);
4250 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
4251 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4253 error ("type mismatch in conditional expression");
4254 debug_generic_expr (lhs_type
);
4255 debug_generic_expr (rhs2_type
);
4256 debug_generic_expr (rhs3_type
);
4262 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4263 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4265 error ("type mismatch in vector permute expression");
4266 debug_generic_expr (lhs_type
);
4267 debug_generic_expr (rhs1_type
);
4268 debug_generic_expr (rhs2_type
);
4269 debug_generic_expr (rhs3_type
);
4273 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4274 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4275 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4277 error ("vector types expected in vector permute expression");
4278 debug_generic_expr (lhs_type
);
4279 debug_generic_expr (rhs1_type
);
4280 debug_generic_expr (rhs2_type
);
4281 debug_generic_expr (rhs3_type
);
4285 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
4286 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
4287 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
4288 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
4289 != TYPE_VECTOR_SUBPARTS (lhs_type
))
4291 error ("vectors with different element number found "
4292 "in vector permute expression");
4293 debug_generic_expr (lhs_type
);
4294 debug_generic_expr (rhs1_type
);
4295 debug_generic_expr (rhs2_type
);
4296 debug_generic_expr (rhs3_type
);
4300 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
4301 || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (rhs3_type
)))
4302 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs1_type
))))
4304 error ("invalid mask type in vector permute expression");
4305 debug_generic_expr (lhs_type
);
4306 debug_generic_expr (rhs1_type
);
4307 debug_generic_expr (rhs2_type
);
4308 debug_generic_expr (rhs3_type
);
4315 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
)
4316 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4317 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
)))
4318 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type
))))
4320 error ("type mismatch in sad expression");
4321 debug_generic_expr (lhs_type
);
4322 debug_generic_expr (rhs1_type
);
4323 debug_generic_expr (rhs2_type
);
4324 debug_generic_expr (rhs3_type
);
4328 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4329 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4330 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4332 error ("vector types expected in sad expression");
4333 debug_generic_expr (lhs_type
);
4334 debug_generic_expr (rhs1_type
);
4335 debug_generic_expr (rhs2_type
);
4336 debug_generic_expr (rhs3_type
);
4342 case BIT_INSERT_EXPR
:
4343 if (! useless_type_conversion_p (lhs_type
, rhs1_type
))
4345 error ("type mismatch in BIT_INSERT_EXPR");
4346 debug_generic_expr (lhs_type
);
4347 debug_generic_expr (rhs1_type
);
4350 if (! ((INTEGRAL_TYPE_P (rhs1_type
)
4351 && INTEGRAL_TYPE_P (rhs2_type
))
4352 || (VECTOR_TYPE_P (rhs1_type
)
4353 && types_compatible_p (TREE_TYPE (rhs1_type
), rhs2_type
))))
4355 error ("not allowed type combination in BIT_INSERT_EXPR");
4356 debug_generic_expr (rhs1_type
);
4357 debug_generic_expr (rhs2_type
);
4360 if (! tree_fits_uhwi_p (rhs3
)
4361 || ! types_compatible_p (bitsizetype
, TREE_TYPE (rhs3
))
4362 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type
)))
4364 error ("invalid position or size in BIT_INSERT_EXPR");
4367 if (INTEGRAL_TYPE_P (rhs1_type
))
4369 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4370 if (bitpos
>= TYPE_PRECISION (rhs1_type
)
4371 || (bitpos
+ TYPE_PRECISION (rhs2_type
)
4372 > TYPE_PRECISION (rhs1_type
)))
4374 error ("insertion out of range in BIT_INSERT_EXPR");
4378 else if (VECTOR_TYPE_P (rhs1_type
))
4380 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4381 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (TYPE_SIZE (rhs2_type
));
4382 if (bitpos
% bitsize
!= 0)
4384 error ("vector insertion not at element boundary");
4392 if (((TREE_CODE (rhs1_type
) != VECTOR_TYPE
4393 || TREE_CODE (lhs_type
) != VECTOR_TYPE
)
4394 && ((!INTEGRAL_TYPE_P (rhs1_type
)
4395 && !SCALAR_FLOAT_TYPE_P (rhs1_type
))
4396 || (!INTEGRAL_TYPE_P (lhs_type
)
4397 && !SCALAR_FLOAT_TYPE_P (lhs_type
))))
4398 || !types_compatible_p (rhs1_type
, rhs2_type
)
4399 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4400 || (GET_MODE_SIZE (element_mode (rhs3_type
))
4401 < 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4403 error ("type mismatch in dot product reduction");
4404 debug_generic_expr (lhs_type
);
4405 debug_generic_expr (rhs1_type
);
4406 debug_generic_expr (rhs2_type
);
4412 case REALIGN_LOAD_EXPR
:
4422 /* Verify a gimple assignment statement STMT with a single rhs.
4423 Returns true if anything is wrong. */
4426 verify_gimple_assign_single (gassign
*stmt
)
4428 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4429 tree lhs
= gimple_assign_lhs (stmt
);
4430 tree lhs_type
= TREE_TYPE (lhs
);
4431 tree rhs1
= gimple_assign_rhs1 (stmt
);
4432 tree rhs1_type
= TREE_TYPE (rhs1
);
4435 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4437 error ("non-trivial conversion at assignment");
4438 debug_generic_expr (lhs_type
);
4439 debug_generic_expr (rhs1_type
);
4443 if (gimple_clobber_p (stmt
)
4444 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4446 error ("non-decl/MEM_REF LHS in clobber statement");
4447 debug_generic_expr (lhs
);
4451 if (handled_component_p (lhs
)
4452 || TREE_CODE (lhs
) == MEM_REF
4453 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4454 res
|= verify_types_in_gimple_reference (lhs
, true);
4456 /* Special codes we cannot handle via their class. */
4461 tree op
= TREE_OPERAND (rhs1
, 0);
4462 if (!is_gimple_addressable (op
))
4464 error ("invalid operand in unary expression");
4468 /* Technically there is no longer a need for matching types, but
4469 gimple hygiene asks for this check. In LTO we can end up
4470 combining incompatible units and thus end up with addresses
4471 of globals that change their type to a common one. */
4473 && !types_compatible_p (TREE_TYPE (op
),
4474 TREE_TYPE (TREE_TYPE (rhs1
)))
4475 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4478 error ("type mismatch in address expression");
4479 debug_generic_stmt (TREE_TYPE (rhs1
));
4480 debug_generic_stmt (TREE_TYPE (op
));
4484 return verify_types_in_gimple_reference (op
, true);
4489 error ("INDIRECT_REF in gimple IL");
4495 case ARRAY_RANGE_REF
:
4496 case VIEW_CONVERT_EXPR
:
4499 case TARGET_MEM_REF
:
4501 if (!is_gimple_reg (lhs
)
4502 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4504 error ("invalid rhs for gimple memory store");
4505 debug_generic_stmt (lhs
);
4506 debug_generic_stmt (rhs1
);
4509 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4521 /* tcc_declaration */
4526 if (!is_gimple_reg (lhs
)
4527 && !is_gimple_reg (rhs1
)
4528 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4530 error ("invalid rhs for gimple memory store");
4531 debug_generic_stmt (lhs
);
4532 debug_generic_stmt (rhs1
);
4538 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
4541 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4543 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4545 /* For vector CONSTRUCTORs we require that either it is empty
4546 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4547 (then the element count must be correct to cover the whole
4548 outer vector and index must be NULL on all elements, or it is
4549 a CONSTRUCTOR of scalar elements, where we as an exception allow
4550 smaller number of elements (assuming zero filling) and
4551 consecutive indexes as compared to NULL indexes (such
4552 CONSTRUCTORs can appear in the IL from FEs). */
4553 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4555 if (elt_t
== NULL_TREE
)
4557 elt_t
= TREE_TYPE (elt_v
);
4558 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4560 tree elt_t
= TREE_TYPE (elt_v
);
4561 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4564 error ("incorrect type of vector CONSTRUCTOR"
4566 debug_generic_stmt (rhs1
);
4569 else if (CONSTRUCTOR_NELTS (rhs1
)
4570 * TYPE_VECTOR_SUBPARTS (elt_t
)
4571 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4573 error ("incorrect number of vector CONSTRUCTOR"
4575 debug_generic_stmt (rhs1
);
4579 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4582 error ("incorrect type of vector CONSTRUCTOR elements");
4583 debug_generic_stmt (rhs1
);
4586 else if (CONSTRUCTOR_NELTS (rhs1
)
4587 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4589 error ("incorrect number of vector CONSTRUCTOR elements");
4590 debug_generic_stmt (rhs1
);
4594 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4596 error ("incorrect type of vector CONSTRUCTOR elements");
4597 debug_generic_stmt (rhs1
);
4600 if (elt_i
!= NULL_TREE
4601 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4602 || TREE_CODE (elt_i
) != INTEGER_CST
4603 || compare_tree_int (elt_i
, i
) != 0))
4605 error ("vector CONSTRUCTOR with non-NULL element index");
4606 debug_generic_stmt (rhs1
);
4609 if (!is_gimple_val (elt_v
))
4611 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4612 debug_generic_stmt (rhs1
);
4617 else if (CONSTRUCTOR_NELTS (rhs1
) != 0)
4619 error ("non-vector CONSTRUCTOR with elements");
4620 debug_generic_stmt (rhs1
);
4626 case WITH_SIZE_EXPR
:
4636 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4637 is a problem, otherwise false. */
4640 verify_gimple_assign (gassign
*stmt
)
4642 switch (gimple_assign_rhs_class (stmt
))
4644 case GIMPLE_SINGLE_RHS
:
4645 return verify_gimple_assign_single (stmt
);
4647 case GIMPLE_UNARY_RHS
:
4648 return verify_gimple_assign_unary (stmt
);
4650 case GIMPLE_BINARY_RHS
:
4651 return verify_gimple_assign_binary (stmt
);
4653 case GIMPLE_TERNARY_RHS
:
4654 return verify_gimple_assign_ternary (stmt
);
4661 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4662 is a problem, otherwise false. */
4665 verify_gimple_return (greturn
*stmt
)
4667 tree op
= gimple_return_retval (stmt
);
4668 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4670 /* We cannot test for present return values as we do not fix up missing
4671 return values from the original source. */
4675 if (!is_gimple_val (op
)
4676 && TREE_CODE (op
) != RESULT_DECL
)
4678 error ("invalid operand in return statement");
4679 debug_generic_stmt (op
);
4683 if ((TREE_CODE (op
) == RESULT_DECL
4684 && DECL_BY_REFERENCE (op
))
4685 || (TREE_CODE (op
) == SSA_NAME
4686 && SSA_NAME_VAR (op
)
4687 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4688 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4689 op
= TREE_TYPE (op
);
4691 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4693 error ("invalid conversion in return statement");
4694 debug_generic_stmt (restype
);
4695 debug_generic_stmt (TREE_TYPE (op
));
4703 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4704 is a problem, otherwise false. */
4707 verify_gimple_goto (ggoto
*stmt
)
4709 tree dest
= gimple_goto_dest (stmt
);
4711 /* ??? We have two canonical forms of direct goto destinations, a
4712 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4713 if (TREE_CODE (dest
) != LABEL_DECL
4714 && (!is_gimple_val (dest
)
4715 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4717 error ("goto destination is neither a label nor a pointer");
4724 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4725 is a problem, otherwise false. */
4728 verify_gimple_switch (gswitch
*stmt
)
4731 tree elt
, prev_upper_bound
= NULL_TREE
;
4732 tree index_type
, elt_type
= NULL_TREE
;
4734 if (!is_gimple_val (gimple_switch_index (stmt
)))
4736 error ("invalid operand to switch statement");
4737 debug_generic_stmt (gimple_switch_index (stmt
));
4741 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4742 if (! INTEGRAL_TYPE_P (index_type
))
4744 error ("non-integral type switch statement");
4745 debug_generic_expr (index_type
);
4749 elt
= gimple_switch_label (stmt
, 0);
4750 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4752 error ("invalid default case label in switch statement");
4753 debug_generic_expr (elt
);
4757 n
= gimple_switch_num_labels (stmt
);
4758 for (i
= 1; i
< n
; i
++)
4760 elt
= gimple_switch_label (stmt
, i
);
4762 if (! CASE_LOW (elt
))
4764 error ("invalid case label in switch statement");
4765 debug_generic_expr (elt
);
4769 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4771 error ("invalid case range in switch statement");
4772 debug_generic_expr (elt
);
4778 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4779 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4781 error ("type mismatch for case label in switch statement");
4782 debug_generic_expr (elt
);
4788 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4789 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4791 error ("type precision mismatch in switch statement");
4796 if (prev_upper_bound
)
4798 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4800 error ("case labels not sorted in switch statement");
4805 prev_upper_bound
= CASE_HIGH (elt
);
4806 if (! prev_upper_bound
)
4807 prev_upper_bound
= CASE_LOW (elt
);
4813 /* Verify a gimple debug statement STMT.
4814 Returns true if anything is wrong. */
4817 verify_gimple_debug (gimple
*stmt ATTRIBUTE_UNUSED
)
4819 /* There isn't much that could be wrong in a gimple debug stmt. A
4820 gimple debug bind stmt, for example, maps a tree, that's usually
4821 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4822 component or member of an aggregate type, to another tree, that
4823 can be an arbitrary expression. These stmts expand into debug
4824 insns, and are converted to debug notes by var-tracking.c. */
4828 /* Verify a gimple label statement STMT.
4829 Returns true if anything is wrong. */
4832 verify_gimple_label (glabel
*stmt
)
4834 tree decl
= gimple_label_label (stmt
);
4838 if (TREE_CODE (decl
) != LABEL_DECL
)
4840 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4841 && DECL_CONTEXT (decl
) != current_function_decl
)
4843 error ("label's context is not the current function decl");
4847 uid
= LABEL_DECL_UID (decl
);
4850 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4852 error ("incorrect entry in label_to_block_map");
4856 uid
= EH_LANDING_PAD_NR (decl
);
4859 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4860 if (decl
!= lp
->post_landing_pad
)
4862 error ("incorrect setting of landing pad number");
4870 /* Verify a gimple cond statement STMT.
4871 Returns true if anything is wrong. */
4874 verify_gimple_cond (gcond
*stmt
)
4876 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4878 error ("invalid comparison code in gimple cond");
4881 if (!(!gimple_cond_true_label (stmt
)
4882 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4883 || !(!gimple_cond_false_label (stmt
)
4884 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4886 error ("invalid labels in gimple cond");
4890 return verify_gimple_comparison (boolean_type_node
,
4891 gimple_cond_lhs (stmt
),
4892 gimple_cond_rhs (stmt
),
4893 gimple_cond_code (stmt
));
4896 /* Verify the GIMPLE statement STMT. Returns true if there is an
4897 error, otherwise false. */
4900 verify_gimple_stmt (gimple
*stmt
)
4902 switch (gimple_code (stmt
))
4905 return verify_gimple_assign (as_a
<gassign
*> (stmt
));
4908 return verify_gimple_label (as_a
<glabel
*> (stmt
));
4911 return verify_gimple_call (as_a
<gcall
*> (stmt
));
4914 return verify_gimple_cond (as_a
<gcond
*> (stmt
));
4917 return verify_gimple_goto (as_a
<ggoto
*> (stmt
));
4920 return verify_gimple_switch (as_a
<gswitch
*> (stmt
));
4923 return verify_gimple_return (as_a
<greturn
*> (stmt
));
4928 case GIMPLE_TRANSACTION
:
4929 return verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
4931 /* Tuples that do not have tree operands. */
4933 case GIMPLE_PREDICT
:
4935 case GIMPLE_EH_DISPATCH
:
4936 case GIMPLE_EH_MUST_NOT_THROW
:
4940 /* OpenMP directives are validated by the FE and never operated
4941 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4942 non-gimple expressions when the main index variable has had
4943 its address taken. This does not affect the loop itself
4944 because the header of an GIMPLE_OMP_FOR is merely used to determine
4945 how to setup the parallel iteration. */
4949 return verify_gimple_debug (stmt
);
4956 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4957 and false otherwise. */
4960 verify_gimple_phi (gimple
*phi
)
4964 tree phi_result
= gimple_phi_result (phi
);
4969 error ("invalid PHI result");
4973 virtual_p
= virtual_operand_p (phi_result
);
4974 if (TREE_CODE (phi_result
) != SSA_NAME
4976 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4978 error ("invalid PHI result");
4982 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4984 tree t
= gimple_phi_arg_def (phi
, i
);
4988 error ("missing PHI def");
4992 /* Addressable variables do have SSA_NAMEs but they
4993 are not considered gimple values. */
4994 else if ((TREE_CODE (t
) == SSA_NAME
4995 && virtual_p
!= virtual_operand_p (t
))
4997 && (TREE_CODE (t
) != SSA_NAME
4998 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
5000 && !is_gimple_val (t
)))
5002 error ("invalid PHI argument");
5003 debug_generic_expr (t
);
5006 #ifdef ENABLE_TYPES_CHECKING
5007 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
5009 error ("incompatible types in PHI argument %u", i
);
5010 debug_generic_stmt (TREE_TYPE (phi_result
));
5011 debug_generic_stmt (TREE_TYPE (t
));
5020 /* Verify the GIMPLE statements inside the sequence STMTS. */
5023 verify_gimple_in_seq_2 (gimple_seq stmts
)
5025 gimple_stmt_iterator ittr
;
5028 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
5030 gimple
*stmt
= gsi_stmt (ittr
);
5032 switch (gimple_code (stmt
))
5035 err
|= verify_gimple_in_seq_2 (
5036 gimple_bind_body (as_a
<gbind
*> (stmt
)));
5040 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
5041 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
5044 case GIMPLE_EH_FILTER
:
5045 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
5048 case GIMPLE_EH_ELSE
:
5050 geh_else
*eh_else
= as_a
<geh_else
*> (stmt
);
5051 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else
));
5052 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else
));
5057 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (
5058 as_a
<gcatch
*> (stmt
)));
5061 case GIMPLE_TRANSACTION
:
5062 err
|= verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
5067 bool err2
= verify_gimple_stmt (stmt
);
5069 debug_gimple_stmt (stmt
);
5078 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5079 is a problem, otherwise false. */
5082 verify_gimple_transaction (gtransaction
*stmt
)
5086 lab
= gimple_transaction_label_norm (stmt
);
5087 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5089 lab
= gimple_transaction_label_uninst (stmt
);
5090 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5092 lab
= gimple_transaction_label_over (stmt
);
5093 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5096 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
5100 /* Verify the GIMPLE statements inside the statement list STMTS. */
5103 verify_gimple_in_seq (gimple_seq stmts
)
5105 timevar_push (TV_TREE_STMT_VERIFY
);
5106 if (verify_gimple_in_seq_2 (stmts
))
5107 internal_error ("verify_gimple failed");
5108 timevar_pop (TV_TREE_STMT_VERIFY
);
5111 /* Return true when the T can be shared. */
5114 tree_node_can_be_shared (tree t
)
5116 if (IS_TYPE_OR_DECL_P (t
)
5117 || is_gimple_min_invariant (t
)
5118 || TREE_CODE (t
) == SSA_NAME
5119 || t
== error_mark_node
5120 || TREE_CODE (t
) == IDENTIFIER_NODE
)
5123 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
5132 /* Called via walk_tree. Verify tree sharing. */
5135 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
5137 hash_set
<void *> *visited
= (hash_set
<void *> *) data
;
5139 if (tree_node_can_be_shared (*tp
))
5141 *walk_subtrees
= false;
5145 if (visited
->add (*tp
))
5151 /* Called via walk_gimple_stmt. Verify tree sharing. */
5154 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
5156 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5157 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
5160 static bool eh_error_found
;
5162 verify_eh_throw_stmt_node (gimple
*const &stmt
, const int &,
5163 hash_set
<gimple
*> *visited
)
5165 if (!visited
->contains (stmt
))
5167 error ("dead STMT in EH table");
5168 debug_gimple_stmt (stmt
);
5169 eh_error_found
= true;
5174 /* Verify if the location LOCs block is in BLOCKS. */
5177 verify_location (hash_set
<tree
> *blocks
, location_t loc
)
5179 tree block
= LOCATION_BLOCK (loc
);
5180 if (block
!= NULL_TREE
5181 && !blocks
->contains (block
))
5183 error ("location references block not in block tree");
5186 if (block
!= NULL_TREE
)
5187 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
5191 /* Called via walk_tree. Verify that expressions have no blocks. */
5194 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
5198 *walk_subtrees
= false;
5202 location_t loc
= EXPR_LOCATION (*tp
);
5203 if (LOCATION_BLOCK (loc
) != NULL
)
5209 /* Called via walk_tree. Verify locations of expressions. */
5212 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
5214 hash_set
<tree
> *blocks
= (hash_set
<tree
> *) data
;
5216 if (VAR_P (*tp
) && DECL_HAS_DEBUG_EXPR_P (*tp
))
5218 tree t
= DECL_DEBUG_EXPR (*tp
);
5219 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
5224 || TREE_CODE (*tp
) == PARM_DECL
5225 || TREE_CODE (*tp
) == RESULT_DECL
)
5226 && DECL_HAS_VALUE_EXPR_P (*tp
))
5228 tree t
= DECL_VALUE_EXPR (*tp
);
5229 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
5236 *walk_subtrees
= false;
5240 location_t loc
= EXPR_LOCATION (*tp
);
5241 if (verify_location (blocks
, loc
))
5247 /* Called via walk_gimple_op. Verify locations of expressions. */
5250 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
5252 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5253 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
5256 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5259 collect_subblocks (hash_set
<tree
> *blocks
, tree block
)
5262 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
5265 collect_subblocks (blocks
, t
);
5269 /* Verify the GIMPLE statements in the CFG of FN. */
5272 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
)
5277 timevar_push (TV_TREE_STMT_VERIFY
);
5278 hash_set
<void *> visited
;
5279 hash_set
<gimple
*> visited_stmts
;
5281 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5282 hash_set
<tree
> blocks
;
5283 if (DECL_INITIAL (fn
->decl
))
5285 blocks
.add (DECL_INITIAL (fn
->decl
));
5286 collect_subblocks (&blocks
, DECL_INITIAL (fn
->decl
));
5289 FOR_EACH_BB_FN (bb
, fn
)
5291 gimple_stmt_iterator gsi
;
5293 for (gphi_iterator gpi
= gsi_start_phis (bb
);
5297 gphi
*phi
= gpi
.phi ();
5301 visited_stmts
.add (phi
);
5303 if (gimple_bb (phi
) != bb
)
5305 error ("gimple_bb (phi) is set to a wrong basic block");
5309 err2
|= verify_gimple_phi (phi
);
5311 /* Only PHI arguments have locations. */
5312 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
5314 error ("PHI node with location");
5318 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5320 tree arg
= gimple_phi_arg_def (phi
, i
);
5321 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
5325 error ("incorrect sharing of tree nodes");
5326 debug_generic_expr (addr
);
5329 location_t loc
= gimple_phi_arg_location (phi
, i
);
5330 if (virtual_operand_p (gimple_phi_result (phi
))
5331 && loc
!= UNKNOWN_LOCATION
)
5333 error ("virtual PHI with argument locations");
5336 addr
= walk_tree (&arg
, verify_expr_location_1
, &blocks
, NULL
);
5339 debug_generic_expr (addr
);
5342 err2
|= verify_location (&blocks
, loc
);
5346 debug_gimple_stmt (phi
);
5350 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5352 gimple
*stmt
= gsi_stmt (gsi
);
5354 struct walk_stmt_info wi
;
5358 visited_stmts
.add (stmt
);
5360 if (gimple_bb (stmt
) != bb
)
5362 error ("gimple_bb (stmt) is set to a wrong basic block");
5366 err2
|= verify_gimple_stmt (stmt
);
5367 err2
|= verify_location (&blocks
, gimple_location (stmt
));
5369 memset (&wi
, 0, sizeof (wi
));
5370 wi
.info
= (void *) &visited
;
5371 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
5374 error ("incorrect sharing of tree nodes");
5375 debug_generic_expr (addr
);
5379 memset (&wi
, 0, sizeof (wi
));
5380 wi
.info
= (void *) &blocks
;
5381 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
5384 debug_generic_expr (addr
);
5388 /* ??? Instead of not checking these stmts at all the walker
5389 should know its context via wi. */
5390 if (!is_gimple_debug (stmt
)
5391 && !is_gimple_omp (stmt
))
5393 memset (&wi
, 0, sizeof (wi
));
5394 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
5397 debug_generic_expr (addr
);
5398 inform (gimple_location (stmt
), "in statement");
5403 /* If the statement is marked as part of an EH region, then it is
5404 expected that the statement could throw. Verify that when we
5405 have optimizations that simplify statements such that we prove
5406 that they cannot throw, that we update other data structures
5408 lp_nr
= lookup_stmt_eh_lp (stmt
);
5411 if (!stmt_could_throw_p (stmt
))
5415 error ("statement marked for throw, but doesn%'t");
5419 else if (!gsi_one_before_end_p (gsi
))
5421 error ("statement marked for throw in middle of block");
5427 debug_gimple_stmt (stmt
);
5432 eh_error_found
= false;
5433 hash_map
<gimple
*, int> *eh_table
= get_eh_throw_stmt_table (cfun
);
5435 eh_table
->traverse
<hash_set
<gimple
*> *, verify_eh_throw_stmt_node
>
5438 if (err
|| eh_error_found
)
5439 internal_error ("verify_gimple failed");
5441 verify_histograms ();
5442 timevar_pop (TV_TREE_STMT_VERIFY
);
5446 /* Verifies that the flow information is OK. */
5449 gimple_verify_flow_info (void)
5453 gimple_stmt_iterator gsi
;
5458 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5459 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5461 error ("ENTRY_BLOCK has IL associated with it");
5465 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5466 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5468 error ("EXIT_BLOCK has IL associated with it");
5472 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5473 if (e
->flags
& EDGE_FALLTHRU
)
5475 error ("fallthru to exit from bb %d", e
->src
->index
);
5479 FOR_EACH_BB_FN (bb
, cfun
)
5481 bool found_ctrl_stmt
= false;
5485 /* Skip labels on the start of basic block. */
5486 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5489 gimple
*prev_stmt
= stmt
;
5491 stmt
= gsi_stmt (gsi
);
5493 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5496 label
= gimple_label_label (as_a
<glabel
*> (stmt
));
5497 if (prev_stmt
&& DECL_NONLOCAL (label
))
5499 error ("nonlocal label ");
5500 print_generic_expr (stderr
, label
);
5501 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5506 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5508 error ("EH landing pad label ");
5509 print_generic_expr (stderr
, label
);
5510 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5515 if (label_to_block (label
) != bb
)
5518 print_generic_expr (stderr
, label
);
5519 fprintf (stderr
, " to block does not match in bb %d",
5524 if (decl_function_context (label
) != current_function_decl
)
5527 print_generic_expr (stderr
, label
);
5528 fprintf (stderr
, " has incorrect context in bb %d",
5534 /* Verify that body of basic block BB is free of control flow. */
5535 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5537 gimple
*stmt
= gsi_stmt (gsi
);
5539 if (found_ctrl_stmt
)
5541 error ("control flow in the middle of basic block %d",
5546 if (stmt_ends_bb_p (stmt
))
5547 found_ctrl_stmt
= true;
5549 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
5552 print_generic_expr (stderr
, gimple_label_label (label_stmt
));
5553 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
5558 gsi
= gsi_last_bb (bb
);
5559 if (gsi_end_p (gsi
))
5562 stmt
= gsi_stmt (gsi
);
5564 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5567 err
|= verify_eh_edges (stmt
);
5569 if (is_ctrl_stmt (stmt
))
5571 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5572 if (e
->flags
& EDGE_FALLTHRU
)
5574 error ("fallthru edge after a control statement in bb %d",
5580 if (gimple_code (stmt
) != GIMPLE_COND
)
5582 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5583 after anything else but if statement. */
5584 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5585 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5587 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5593 switch (gimple_code (stmt
))
5600 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5604 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5605 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5606 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5607 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5608 || EDGE_COUNT (bb
->succs
) >= 3)
5610 error ("wrong outgoing edge flags at end of bb %d",
5618 if (simple_goto_p (stmt
))
5620 error ("explicit goto at end of bb %d", bb
->index
);
5625 /* FIXME. We should double check that the labels in the
5626 destination blocks have their address taken. */
5627 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5628 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5629 | EDGE_FALSE_VALUE
))
5630 || !(e
->flags
& EDGE_ABNORMAL
))
5632 error ("wrong outgoing edge flags at end of bb %d",
5640 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5644 if (!single_succ_p (bb
)
5645 || (single_succ_edge (bb
)->flags
5646 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5647 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5649 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5652 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5654 error ("return edge does not point to exit in bb %d",
5662 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5667 n
= gimple_switch_num_labels (switch_stmt
);
5669 /* Mark all the destination basic blocks. */
5670 for (i
= 0; i
< n
; ++i
)
5672 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5673 basic_block label_bb
= label_to_block (lab
);
5674 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5675 label_bb
->aux
= (void *)1;
5678 /* Verify that the case labels are sorted. */
5679 prev
= gimple_switch_label (switch_stmt
, 0);
5680 for (i
= 1; i
< n
; ++i
)
5682 tree c
= gimple_switch_label (switch_stmt
, i
);
5685 error ("found default case not at the start of "
5691 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5693 error ("case labels not sorted: ");
5694 print_generic_expr (stderr
, prev
);
5695 fprintf (stderr
," is greater than ");
5696 print_generic_expr (stderr
, c
);
5697 fprintf (stderr
," but comes before it.\n");
5702 /* VRP will remove the default case if it can prove it will
5703 never be executed. So do not verify there always exists
5704 a default case here. */
5706 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5710 error ("extra outgoing edge %d->%d",
5711 bb
->index
, e
->dest
->index
);
5715 e
->dest
->aux
= (void *)2;
5716 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5717 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5719 error ("wrong outgoing edge flags at end of bb %d",
5725 /* Check that we have all of them. */
5726 for (i
= 0; i
< n
; ++i
)
5728 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5729 basic_block label_bb
= label_to_block (lab
);
5731 if (label_bb
->aux
!= (void *)2)
5733 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5738 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5739 e
->dest
->aux
= (void *)0;
5743 case GIMPLE_EH_DISPATCH
:
5744 err
|= verify_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
));
5752 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5753 verify_dominators (CDI_DOMINATORS
);
5759 /* Updates phi nodes after creating a forwarder block joined
5760 by edge FALLTHRU. */
5763 gimple_make_forwarder_block (edge fallthru
)
5767 basic_block dummy
, bb
;
5771 dummy
= fallthru
->src
;
5772 bb
= fallthru
->dest
;
5774 if (single_pred_p (bb
))
5777 /* If we redirected a branch we must create new PHI nodes at the
5779 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5781 gphi
*phi
, *new_phi
;
5784 var
= gimple_phi_result (phi
);
5785 new_phi
= create_phi_node (var
, bb
);
5786 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5787 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5791 /* Add the arguments we have stored on edges. */
5792 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5797 flush_pending_stmts (e
);
5802 /* Return a non-special label in the head of basic block BLOCK.
5803 Create one if it doesn't exist. */
5806 gimple_block_label (basic_block bb
)
5808 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5813 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5815 stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
5818 label
= gimple_label_label (stmt
);
5819 if (!DECL_NONLOCAL (label
))
5822 gsi_move_before (&i
, &s
);
5827 label
= create_artificial_label (UNKNOWN_LOCATION
);
5828 stmt
= gimple_build_label (label
);
5829 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5834 /* Attempt to perform edge redirection by replacing a possibly complex
5835 jump instruction by a goto or by removing the jump completely.
5836 This can apply only if all edges now point to the same block. The
5837 parameters and return values are equivalent to
5838 redirect_edge_and_branch. */
5841 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5843 basic_block src
= e
->src
;
5844 gimple_stmt_iterator i
;
5847 /* We can replace or remove a complex jump only when we have exactly
5849 if (EDGE_COUNT (src
->succs
) != 2
5850 /* Verify that all targets will be TARGET. Specifically, the
5851 edge that is not E must also go to TARGET. */
5852 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5855 i
= gsi_last_bb (src
);
5859 stmt
= gsi_stmt (i
);
5861 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5863 gsi_remove (&i
, true);
5864 e
= ssa_redirect_edge (e
, target
);
5865 e
->flags
= EDGE_FALLTHRU
;
5873 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5874 edge representing the redirected branch. */
5877 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5879 basic_block bb
= e
->src
;
5880 gimple_stmt_iterator gsi
;
5884 if (e
->flags
& EDGE_ABNORMAL
)
5887 if (e
->dest
== dest
)
5890 if (e
->flags
& EDGE_EH
)
5891 return redirect_eh_edge (e
, dest
);
5893 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5895 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5900 gsi
= gsi_last_bb (bb
);
5901 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5903 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5906 /* For COND_EXPR, we only need to redirect the edge. */
5910 /* No non-abnormal edges should lead from a non-simple goto, and
5911 simple ones should be represented implicitly. */
5916 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5917 tree label
= gimple_block_label (dest
);
5918 tree cases
= get_cases_for_edge (e
, switch_stmt
);
5920 /* If we have a list of cases associated with E, then use it
5921 as it's a lot faster than walking the entire case vector. */
5924 edge e2
= find_edge (e
->src
, dest
);
5931 CASE_LABEL (cases
) = label
;
5932 cases
= CASE_CHAIN (cases
);
5935 /* If there was already an edge in the CFG, then we need
5936 to move all the cases associated with E to E2. */
5939 tree cases2
= get_cases_for_edge (e2
, switch_stmt
);
5941 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5942 CASE_CHAIN (cases2
) = first
;
5944 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5948 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
5950 for (i
= 0; i
< n
; i
++)
5952 tree elt
= gimple_switch_label (switch_stmt
, i
);
5953 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5954 CASE_LABEL (elt
) = label
;
5962 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5963 int i
, n
= gimple_asm_nlabels (asm_stmt
);
5966 for (i
= 0; i
< n
; ++i
)
5968 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
5969 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5972 label
= gimple_block_label (dest
);
5973 TREE_VALUE (cons
) = label
;
5977 /* If we didn't find any label matching the former edge in the
5978 asm labels, we must be redirecting the fallthrough
5980 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5985 gsi_remove (&gsi
, true);
5986 e
->flags
|= EDGE_FALLTHRU
;
5989 case GIMPLE_OMP_RETURN
:
5990 case GIMPLE_OMP_CONTINUE
:
5991 case GIMPLE_OMP_SECTIONS_SWITCH
:
5992 case GIMPLE_OMP_FOR
:
5993 /* The edges from OMP constructs can be simply redirected. */
5996 case GIMPLE_EH_DISPATCH
:
5997 if (!(e
->flags
& EDGE_FALLTHRU
))
5998 redirect_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
), e
, dest
);
6001 case GIMPLE_TRANSACTION
:
6002 if (e
->flags
& EDGE_TM_ABORT
)
6003 gimple_transaction_set_label_over (as_a
<gtransaction
*> (stmt
),
6004 gimple_block_label (dest
));
6005 else if (e
->flags
& EDGE_TM_UNINSTRUMENTED
)
6006 gimple_transaction_set_label_uninst (as_a
<gtransaction
*> (stmt
),
6007 gimple_block_label (dest
));
6009 gimple_transaction_set_label_norm (as_a
<gtransaction
*> (stmt
),
6010 gimple_block_label (dest
));
6014 /* Otherwise it must be a fallthru edge, and we don't need to
6015 do anything besides redirecting it. */
6016 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
6020 /* Update/insert PHI nodes as necessary. */
6022 /* Now update the edges in the CFG. */
6023 e
= ssa_redirect_edge (e
, dest
);
6028 /* Returns true if it is possible to remove edge E by redirecting
6029 it to the destination of the other edge from E->src. */
6032 gimple_can_remove_branch_p (const_edge e
)
6034 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
6040 /* Simple wrapper, as we can always redirect fallthru edges. */
6043 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
6045 e
= gimple_redirect_edge_and_branch (e
, dest
);
6052 /* Splits basic block BB after statement STMT (but at least after the
6053 labels). If STMT is NULL, BB is split just after the labels. */
6056 gimple_split_block (basic_block bb
, void *stmt
)
6058 gimple_stmt_iterator gsi
;
6059 gimple_stmt_iterator gsi_tgt
;
6065 new_bb
= create_empty_bb (bb
);
6067 /* Redirect the outgoing edges. */
6068 new_bb
->succs
= bb
->succs
;
6070 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
6073 /* Get a stmt iterator pointing to the first stmt to move. */
6074 if (!stmt
|| gimple_code ((gimple
*) stmt
) == GIMPLE_LABEL
)
6075 gsi
= gsi_after_labels (bb
);
6078 gsi
= gsi_for_stmt ((gimple
*) stmt
);
6082 /* Move everything from GSI to the new basic block. */
6083 if (gsi_end_p (gsi
))
6086 /* Split the statement list - avoid re-creating new containers as this
6087 brings ugly quadratic memory consumption in the inliner.
6088 (We are still quadratic since we need to update stmt BB pointers,
6090 gsi_split_seq_before (&gsi
, &list
);
6091 set_bb_seq (new_bb
, list
);
6092 for (gsi_tgt
= gsi_start (list
);
6093 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
6094 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
6100 /* Moves basic block BB after block AFTER. */
6103 gimple_move_block_after (basic_block bb
, basic_block after
)
6105 if (bb
->prev_bb
== after
)
6109 link_block (bb
, after
);
6115 /* Return TRUE if block BB has no executable statements, otherwise return
6119 gimple_empty_block_p (basic_block bb
)
6121 /* BB must have no executable statements. */
6122 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
6125 if (gsi_end_p (gsi
))
6127 if (is_gimple_debug (gsi_stmt (gsi
)))
6128 gsi_next_nondebug (&gsi
);
6129 return gsi_end_p (gsi
);
6133 /* Split a basic block if it ends with a conditional branch and if the
6134 other part of the block is not empty. */
6137 gimple_split_block_before_cond_jump (basic_block bb
)
6139 gimple
*last
, *split_point
;
6140 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
6141 if (gsi_end_p (gsi
))
6143 last
= gsi_stmt (gsi
);
6144 if (gimple_code (last
) != GIMPLE_COND
6145 && gimple_code (last
) != GIMPLE_SWITCH
)
6148 split_point
= gsi_stmt (gsi
);
6149 return split_block (bb
, split_point
)->dest
;
6153 /* Return true if basic_block can be duplicated. */
6156 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
6161 /* Create a duplicate of the basic block BB. NOTE: This does not
6162 preserve SSA form. */
6165 gimple_duplicate_bb (basic_block bb
)
6168 gimple_stmt_iterator gsi_tgt
;
6170 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
6172 /* Copy the PHI nodes. We ignore PHI node arguments here because
6173 the incoming edges have not been setup yet. */
6174 for (gphi_iterator gpi
= gsi_start_phis (bb
);
6180 copy
= create_phi_node (NULL_TREE
, new_bb
);
6181 create_new_def_for (gimple_phi_result (phi
), copy
,
6182 gimple_phi_result_ptr (copy
));
6183 gimple_set_uid (copy
, gimple_uid (phi
));
6186 gsi_tgt
= gsi_start_bb (new_bb
);
6187 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
6191 def_operand_p def_p
;
6192 ssa_op_iter op_iter
;
6194 gimple
*stmt
, *copy
;
6196 stmt
= gsi_stmt (gsi
);
6197 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6200 /* Don't duplicate label debug stmts. */
6201 if (gimple_debug_bind_p (stmt
)
6202 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
6206 /* Create a new copy of STMT and duplicate STMT's virtual
6208 copy
= gimple_copy (stmt
);
6209 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
6211 maybe_duplicate_eh_stmt (copy
, stmt
);
6212 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
6214 /* When copying around a stmt writing into a local non-user
6215 aggregate, make sure it won't share stack slot with other
6217 lhs
= gimple_get_lhs (stmt
);
6218 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
6220 tree base
= get_base_address (lhs
);
6222 && (VAR_P (base
) || TREE_CODE (base
) == RESULT_DECL
)
6223 && DECL_IGNORED_P (base
)
6224 && !TREE_STATIC (base
)
6225 && !DECL_EXTERNAL (base
)
6226 && (!VAR_P (base
) || !DECL_HAS_VALUE_EXPR_P (base
)))
6227 DECL_NONSHAREABLE (base
) = 1;
6230 /* Create new names for all the definitions created by COPY and
6231 add replacement mappings for each new name. */
6232 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
6233 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
6239 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6242 add_phi_args_after_copy_edge (edge e_copy
)
6244 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
6247 gphi
*phi
, *phi_copy
;
6249 gphi_iterator psi
, psi_copy
;
6251 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
6254 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
6256 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
6257 dest
= get_bb_original (e_copy
->dest
);
6259 dest
= e_copy
->dest
;
6261 e
= find_edge (bb
, dest
);
6264 /* During loop unrolling the target of the latch edge is copied.
6265 In this case we are not looking for edge to dest, but to
6266 duplicated block whose original was dest. */
6267 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6269 if ((e
->dest
->flags
& BB_DUPLICATED
)
6270 && get_bb_original (e
->dest
) == dest
)
6274 gcc_assert (e
!= NULL
);
6277 for (psi
= gsi_start_phis (e
->dest
),
6278 psi_copy
= gsi_start_phis (e_copy
->dest
);
6280 gsi_next (&psi
), gsi_next (&psi_copy
))
6283 phi_copy
= psi_copy
.phi ();
6284 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
6285 add_phi_arg (phi_copy
, def
, e_copy
,
6286 gimple_phi_arg_location_from_edge (phi
, e
));
6291 /* Basic block BB_COPY was created by code duplication. Add phi node
6292 arguments for edges going out of BB_COPY. The blocks that were
6293 duplicated have BB_DUPLICATED set. */
6296 add_phi_args_after_copy_bb (basic_block bb_copy
)
6301 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
6303 add_phi_args_after_copy_edge (e_copy
);
6307 /* Blocks in REGION_COPY array of length N_REGION were created by
6308 duplication of basic blocks. Add phi node arguments for edges
6309 going from these blocks. If E_COPY is not NULL, also add
6310 phi node arguments for its destination.*/
6313 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
6318 for (i
= 0; i
< n_region
; i
++)
6319 region_copy
[i
]->flags
|= BB_DUPLICATED
;
6321 for (i
= 0; i
< n_region
; i
++)
6322 add_phi_args_after_copy_bb (region_copy
[i
]);
6324 add_phi_args_after_copy_edge (e_copy
);
6326 for (i
= 0; i
< n_region
; i
++)
6327 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
6330 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6331 important exit edge EXIT. By important we mean that no SSA name defined
6332 inside region is live over the other exit edges of the region. All entry
6333 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6334 to the duplicate of the region. Dominance and loop information is
6335 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6336 UPDATE_DOMINANCE is false then we assume that the caller will update the
6337 dominance information after calling this function. The new basic
6338 blocks are stored to REGION_COPY in the same order as they had in REGION,
6339 provided that REGION_COPY is not NULL.
6340 The function returns false if it is unable to copy the region,
6344 gimple_duplicate_sese_region (edge entry
, edge exit
,
6345 basic_block
*region
, unsigned n_region
,
6346 basic_block
*region_copy
,
6347 bool update_dominance
)
6350 bool free_region_copy
= false, copying_header
= false;
6351 struct loop
*loop
= entry
->dest
->loop_father
;
6353 vec
<basic_block
> doms
= vNULL
;
6355 profile_count total_count
= profile_count::uninitialized ();
6356 profile_count entry_count
= profile_count::uninitialized ();
6358 if (!can_copy_bbs_p (region
, n_region
))
6361 /* Some sanity checking. Note that we do not check for all possible
6362 missuses of the functions. I.e. if you ask to copy something weird,
6363 it will work, but the state of structures probably will not be
6365 for (i
= 0; i
< n_region
; i
++)
6367 /* We do not handle subloops, i.e. all the blocks must belong to the
6369 if (region
[i
]->loop_father
!= loop
)
6372 if (region
[i
] != entry
->dest
6373 && region
[i
] == loop
->header
)
6377 /* In case the function is used for loop header copying (which is the primary
6378 use), ensure that EXIT and its copy will be new latch and entry edges. */
6379 if (loop
->header
== entry
->dest
)
6381 copying_header
= true;
6383 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
6386 for (i
= 0; i
< n_region
; i
++)
6387 if (region
[i
] != exit
->src
6388 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
6392 initialize_original_copy_tables ();
6395 set_loop_copy (loop
, loop_outer (loop
));
6397 set_loop_copy (loop
, loop
);
6401 region_copy
= XNEWVEC (basic_block
, n_region
);
6402 free_region_copy
= true;
6405 /* Record blocks outside the region that are dominated by something
6407 if (update_dominance
)
6410 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6413 if (entry
->dest
->count
.initialized_p ())
6415 total_count
= entry
->dest
->count
;
6416 entry_count
= entry
->count ();
6417 /* Fix up corner cases, to avoid division by zero or creation of negative
6419 if (entry_count
> total_count
)
6420 entry_count
= total_count
;
6423 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
6424 split_edge_bb_loc (entry
), update_dominance
);
6425 if (total_count
.initialized_p () && entry_count
.initialized_p ())
6427 scale_bbs_frequencies_profile_count (region
, n_region
,
6428 total_count
- entry_count
,
6430 scale_bbs_frequencies_profile_count (region_copy
, n_region
, entry_count
,
6436 loop
->header
= exit
->dest
;
6437 loop
->latch
= exit
->src
;
6440 /* Redirect the entry and add the phi node arguments. */
6441 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
6442 gcc_assert (redirected
!= NULL
);
6443 flush_pending_stmts (entry
);
6445 /* Concerning updating of dominators: We must recount dominators
6446 for entry block and its copy. Anything that is outside of the
6447 region, but was dominated by something inside needs recounting as
6449 if (update_dominance
)
6451 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6452 doms
.safe_push (get_bb_original (entry
->dest
));
6453 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6457 /* Add the other PHI node arguments. */
6458 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6460 if (free_region_copy
)
6463 free_original_copy_tables ();
6467 /* Checks if BB is part of the region defined by N_REGION BBS. */
6469 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6473 for (n
= 0; n
< n_region
; n
++)
6481 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6482 are stored to REGION_COPY in the same order in that they appear
6483 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6484 the region, EXIT an exit from it. The condition guarding EXIT
6485 is moved to ENTRY. Returns true if duplication succeeds, false
6511 gimple_duplicate_sese_tail (edge entry
, edge exit
,
6512 basic_block
*region
, unsigned n_region
,
6513 basic_block
*region_copy
)
6516 bool free_region_copy
= false;
6517 struct loop
*loop
= exit
->dest
->loop_father
;
6518 struct loop
*orig_loop
= entry
->dest
->loop_father
;
6519 basic_block switch_bb
, entry_bb
, nentry_bb
;
6520 vec
<basic_block
> doms
;
6521 profile_count total_count
= profile_count::uninitialized (),
6522 exit_count
= profile_count::uninitialized ();
6523 edge exits
[2], nexits
[2], e
;
6524 gimple_stmt_iterator gsi
;
6527 basic_block exit_bb
;
6531 struct loop
*target
, *aloop
, *cloop
;
6533 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6535 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6537 if (!can_copy_bbs_p (region
, n_region
))
6540 initialize_original_copy_tables ();
6541 set_loop_copy (orig_loop
, loop
);
6544 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6546 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6548 cloop
= duplicate_loop (aloop
, target
);
6549 duplicate_subloops (aloop
, cloop
);
6555 region_copy
= XNEWVEC (basic_block
, n_region
);
6556 free_region_copy
= true;
6559 gcc_assert (!need_ssa_update_p (cfun
));
6561 /* Record blocks outside the region that are dominated by something
6563 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6565 total_count
= exit
->src
->count
;
6566 exit_count
= exit
->count ();
6567 /* Fix up corner cases, to avoid division by zero or creation of negative
6569 if (exit_count
> total_count
)
6570 exit_count
= total_count
;
6572 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6573 split_edge_bb_loc (exit
), true);
6574 if (total_count
.initialized_p () && exit_count
.initialized_p ())
6576 scale_bbs_frequencies_profile_count (region
, n_region
,
6577 total_count
- exit_count
,
6579 scale_bbs_frequencies_profile_count (region_copy
, n_region
, exit_count
,
6583 /* Create the switch block, and put the exit condition to it. */
6584 entry_bb
= entry
->dest
;
6585 nentry_bb
= get_bb_copy (entry_bb
);
6586 if (!last_stmt (entry
->src
)
6587 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6588 switch_bb
= entry
->src
;
6590 switch_bb
= split_edge (entry
);
6591 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6593 gsi
= gsi_last_bb (switch_bb
);
6594 cond_stmt
= last_stmt (exit
->src
);
6595 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6596 cond_stmt
= gimple_copy (cond_stmt
);
6598 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6600 sorig
= single_succ_edge (switch_bb
);
6601 sorig
->flags
= exits
[1]->flags
;
6602 sorig
->probability
= exits
[1]->probability
;
6603 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6604 snew
->probability
= exits
[0]->probability
;
6607 /* Register the new edge from SWITCH_BB in loop exit lists. */
6608 rescan_loop_exit (snew
, true, false);
6610 /* Add the PHI node arguments. */
6611 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6613 /* Get rid of now superfluous conditions and associated edges (and phi node
6615 exit_bb
= exit
->dest
;
6617 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6618 PENDING_STMT (e
) = NULL
;
6620 /* The latch of ORIG_LOOP was copied, and so was the backedge
6621 to the original header. We redirect this backedge to EXIT_BB. */
6622 for (i
= 0; i
< n_region
; i
++)
6623 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6625 gcc_assert (single_succ_edge (region_copy
[i
]));
6626 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6627 PENDING_STMT (e
) = NULL
;
6628 for (psi
= gsi_start_phis (exit_bb
);
6633 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6634 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6637 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6638 PENDING_STMT (e
) = NULL
;
6640 /* Anything that is outside of the region, but was dominated by something
6641 inside needs to update dominance info. */
6642 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6644 /* Update the SSA web. */
6645 update_ssa (TODO_update_ssa
);
6647 if (free_region_copy
)
6650 free_original_copy_tables ();
6654 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6655 adding blocks when the dominator traversal reaches EXIT. This
6656 function silently assumes that ENTRY strictly dominates EXIT. */
6659 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6660 vec
<basic_block
> *bbs_p
)
6664 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6666 son
= next_dom_son (CDI_DOMINATORS
, son
))
6668 bbs_p
->safe_push (son
);
6670 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6674 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6675 The duplicates are recorded in VARS_MAP. */
6678 replace_by_duplicate_decl (tree
*tp
, hash_map
<tree
, tree
> *vars_map
,
6681 tree t
= *tp
, new_t
;
6682 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6684 if (DECL_CONTEXT (t
) == to_context
)
6688 tree
&loc
= vars_map
->get_or_insert (t
, &existed
);
6694 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6695 add_local_decl (f
, new_t
);
6699 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6700 new_t
= copy_node (t
);
6702 DECL_CONTEXT (new_t
) = to_context
;
6713 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6714 VARS_MAP maps old ssa names and var_decls to the new ones. */
6717 replace_ssa_name (tree name
, hash_map
<tree
, tree
> *vars_map
,
6722 gcc_assert (!virtual_operand_p (name
));
6724 tree
*loc
= vars_map
->get (name
);
6728 tree decl
= SSA_NAME_VAR (name
);
6731 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name
));
6732 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6733 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6734 decl
, SSA_NAME_DEF_STMT (name
));
6737 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6738 name
, SSA_NAME_DEF_STMT (name
));
6740 /* Now that we've used the def stmt to define new_name, make sure it
6741 doesn't define name anymore. */
6742 SSA_NAME_DEF_STMT (name
) = NULL
;
6744 vars_map
->put (name
, new_name
);
6758 hash_map
<tree
, tree
> *vars_map
;
6759 htab_t new_label_map
;
6760 hash_map
<void *, void *> *eh_map
;
6764 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6765 contained in *TP if it has been ORIG_BLOCK previously and change the
6766 DECL_CONTEXT of every local variable referenced in *TP. */
6769 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6771 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6772 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6777 tree block
= TREE_BLOCK (t
);
6778 if (block
== NULL_TREE
)
6780 else if (block
== p
->orig_block
6781 || p
->orig_block
== NULL_TREE
)
6782 TREE_SET_BLOCK (t
, p
->new_block
);
6783 else if (flag_checking
)
6785 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6786 block
= BLOCK_SUPERCONTEXT (block
);
6787 gcc_assert (block
== p
->orig_block
);
6790 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6792 if (TREE_CODE (t
) == SSA_NAME
)
6793 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6794 else if (TREE_CODE (t
) == PARM_DECL
6795 && gimple_in_ssa_p (cfun
))
6796 *tp
= *(p
->vars_map
->get (t
));
6797 else if (TREE_CODE (t
) == LABEL_DECL
)
6799 if (p
->new_label_map
)
6801 struct tree_map in
, *out
;
6803 out
= (struct tree_map
*)
6804 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6809 /* For FORCED_LABELs we can end up with references from other
6810 functions if some SESE regions are outlined. It is UB to
6811 jump in between them, but they could be used just for printing
6812 addresses etc. In that case, DECL_CONTEXT on the label should
6813 be the function containing the glabel stmt with that LABEL_DECL,
6814 rather than whatever function a reference to the label was seen
6816 if (!FORCED_LABEL (t
) && !DECL_NONLOCAL (t
))
6817 DECL_CONTEXT (t
) = p
->to_context
;
6819 else if (p
->remap_decls_p
)
6821 /* Replace T with its duplicate. T should no longer appear in the
6822 parent function, so this looks wasteful; however, it may appear
6823 in referenced_vars, and more importantly, as virtual operands of
6824 statements, and in alias lists of other variables. It would be
6825 quite difficult to expunge it from all those places. ??? It might
6826 suffice to do this for addressable variables. */
6827 if ((VAR_P (t
) && !is_global_var (t
))
6828 || TREE_CODE (t
) == CONST_DECL
)
6829 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6833 else if (TYPE_P (t
))
6839 /* Helper for move_stmt_r. Given an EH region number for the source
6840 function, map that to the duplicate EH regio number in the dest. */
6843 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6845 eh_region old_r
, new_r
;
6847 old_r
= get_eh_region_from_number (old_nr
);
6848 new_r
= static_cast<eh_region
> (*p
->eh_map
->get (old_r
));
6850 return new_r
->index
;
6853 /* Similar, but operate on INTEGER_CSTs. */
6856 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6860 old_nr
= tree_to_shwi (old_t_nr
);
6861 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6863 return build_int_cst (integer_type_node
, new_nr
);
6866 /* Like move_stmt_op, but for gimple statements.
6868 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6869 contained in the current statement in *GSI_P and change the
6870 DECL_CONTEXT of every local variable referenced in the current
6874 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6875 struct walk_stmt_info
*wi
)
6877 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6878 gimple
*stmt
= gsi_stmt (*gsi_p
);
6879 tree block
= gimple_block (stmt
);
6881 if (block
== p
->orig_block
6882 || (p
->orig_block
== NULL_TREE
6883 && block
!= NULL_TREE
))
6884 gimple_set_block (stmt
, p
->new_block
);
6886 switch (gimple_code (stmt
))
6889 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6891 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6892 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6893 switch (DECL_FUNCTION_CODE (fndecl
))
6895 case BUILT_IN_EH_COPY_VALUES
:
6896 r
= gimple_call_arg (stmt
, 1);
6897 r
= move_stmt_eh_region_tree_nr (r
, p
);
6898 gimple_call_set_arg (stmt
, 1, r
);
6901 case BUILT_IN_EH_POINTER
:
6902 case BUILT_IN_EH_FILTER
:
6903 r
= gimple_call_arg (stmt
, 0);
6904 r
= move_stmt_eh_region_tree_nr (r
, p
);
6905 gimple_call_set_arg (stmt
, 0, r
);
6916 gresx
*resx_stmt
= as_a
<gresx
*> (stmt
);
6917 int r
= gimple_resx_region (resx_stmt
);
6918 r
= move_stmt_eh_region_nr (r
, p
);
6919 gimple_resx_set_region (resx_stmt
, r
);
6923 case GIMPLE_EH_DISPATCH
:
6925 geh_dispatch
*eh_dispatch_stmt
= as_a
<geh_dispatch
*> (stmt
);
6926 int r
= gimple_eh_dispatch_region (eh_dispatch_stmt
);
6927 r
= move_stmt_eh_region_nr (r
, p
);
6928 gimple_eh_dispatch_set_region (eh_dispatch_stmt
, r
);
6932 case GIMPLE_OMP_RETURN
:
6933 case GIMPLE_OMP_CONTINUE
:
6938 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6939 so that such labels can be referenced from other regions.
6940 Make sure to update it when seeing a GIMPLE_LABEL though,
6941 that is the owner of the label. */
6942 walk_gimple_op (stmt
, move_stmt_op
, wi
);
6943 *handled_ops_p
= true;
6944 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
6945 if (FORCED_LABEL (label
) || DECL_NONLOCAL (label
))
6946 DECL_CONTEXT (label
) = p
->to_context
;
6951 if (is_gimple_omp (stmt
))
6953 /* Do not remap variables inside OMP directives. Variables
6954 referenced in clauses and directive header belong to the
6955 parent function and should not be moved into the child
6957 bool save_remap_decls_p
= p
->remap_decls_p
;
6958 p
->remap_decls_p
= false;
6959 *handled_ops_p
= true;
6961 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6964 p
->remap_decls_p
= save_remap_decls_p
;
6972 /* Move basic block BB from function CFUN to function DEST_FN. The
6973 block is moved out of the original linked list and placed after
6974 block AFTER in the new list. Also, the block is removed from the
6975 original array of blocks and placed in DEST_FN's array of blocks.
6976 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6977 updated to reflect the moved edges.
6979 The local variables are remapped to new instances, VARS_MAP is used
6980 to record the mapping. */
6983 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6984 basic_block after
, bool update_edge_count_p
,
6985 struct move_stmt_d
*d
)
6987 struct control_flow_graph
*cfg
;
6990 gimple_stmt_iterator si
;
6991 unsigned old_len
, new_len
;
6993 /* Remove BB from dominance structures. */
6994 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6996 /* Move BB from its current loop to the copy in the new function. */
6999 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
7001 bb
->loop_father
= new_loop
;
7004 /* Link BB to the new linked list. */
7005 move_block_after (bb
, after
);
7007 /* Update the edge count in the corresponding flowgraphs. */
7008 if (update_edge_count_p
)
7009 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7011 cfun
->cfg
->x_n_edges
--;
7012 dest_cfun
->cfg
->x_n_edges
++;
7015 /* Remove BB from the original basic block array. */
7016 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
7017 cfun
->cfg
->x_n_basic_blocks
--;
7019 /* Grow DEST_CFUN's basic block array if needed. */
7020 cfg
= dest_cfun
->cfg
;
7021 cfg
->x_n_basic_blocks
++;
7022 if (bb
->index
>= cfg
->x_last_basic_block
)
7023 cfg
->x_last_basic_block
= bb
->index
+ 1;
7025 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
7026 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
7028 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
7029 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
7032 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
7034 /* Remap the variables in phi nodes. */
7035 for (gphi_iterator psi
= gsi_start_phis (bb
);
7038 gphi
*phi
= psi
.phi ();
7040 tree op
= PHI_RESULT (phi
);
7044 if (virtual_operand_p (op
))
7046 /* Remove the phi nodes for virtual operands (alias analysis will be
7047 run for the new function, anyway). */
7048 remove_phi_node (&psi
, true);
7052 SET_PHI_RESULT (phi
,
7053 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
7054 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
7056 op
= USE_FROM_PTR (use
);
7057 if (TREE_CODE (op
) == SSA_NAME
)
7058 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
7061 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
7063 location_t locus
= gimple_phi_arg_location (phi
, i
);
7064 tree block
= LOCATION_BLOCK (locus
);
7066 if (locus
== UNKNOWN_LOCATION
)
7068 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
7070 locus
= set_block (locus
, d
->new_block
);
7071 gimple_phi_arg_set_location (phi
, i
, locus
);
7078 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
7080 gimple
*stmt
= gsi_stmt (si
);
7081 struct walk_stmt_info wi
;
7083 memset (&wi
, 0, sizeof (wi
));
7085 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
7087 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
7089 tree label
= gimple_label_label (label_stmt
);
7090 int uid
= LABEL_DECL_UID (label
);
7092 gcc_assert (uid
> -1);
7094 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
7095 if (old_len
<= (unsigned) uid
)
7097 new_len
= 3 * uid
/ 2 + 1;
7098 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
7101 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
7102 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
7104 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
7106 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
7107 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
7110 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
7111 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
7113 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
7114 gimple_remove_stmt_histograms (cfun
, stmt
);
7116 /* We cannot leave any operands allocated from the operand caches of
7117 the current function. */
7118 free_stmt_operands (cfun
, stmt
);
7119 push_cfun (dest_cfun
);
7124 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7125 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
7127 tree block
= LOCATION_BLOCK (e
->goto_locus
);
7128 if (d
->orig_block
== NULL_TREE
7129 || block
== d
->orig_block
)
7130 e
->goto_locus
= set_block (e
->goto_locus
, d
->new_block
);
7134 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7135 the outermost EH region. Use REGION as the incoming base EH region. */
7138 find_outermost_region_in_block (struct function
*src_cfun
,
7139 basic_block bb
, eh_region region
)
7141 gimple_stmt_iterator si
;
7143 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
7145 gimple
*stmt
= gsi_stmt (si
);
7146 eh_region stmt_region
;
7149 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
7150 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
7154 region
= stmt_region
;
7155 else if (stmt_region
!= region
)
7157 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
7158 gcc_assert (region
!= NULL
);
7167 new_label_mapper (tree decl
, void *data
)
7169 htab_t hash
= (htab_t
) data
;
7173 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
7175 m
= XNEW (struct tree_map
);
7176 m
->hash
= DECL_UID (decl
);
7177 m
->base
.from
= decl
;
7178 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
7179 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
7180 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
7181 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
7183 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
7184 gcc_assert (*slot
== NULL
);
7191 /* Tree walker to replace the decls used inside value expressions by
7195 replace_block_vars_by_duplicates_1 (tree
*tp
, int *walk_subtrees
, void *data
)
7197 struct replace_decls_d
*rd
= (struct replace_decls_d
*)data
;
7199 switch (TREE_CODE (*tp
))
7204 replace_by_duplicate_decl (tp
, rd
->vars_map
, rd
->to_context
);
7210 if (IS_TYPE_OR_DECL_P (*tp
))
7211 *walk_subtrees
= false;
7216 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7220 replace_block_vars_by_duplicates (tree block
, hash_map
<tree
, tree
> *vars_map
,
7225 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
7228 if (!VAR_P (t
) && TREE_CODE (t
) != CONST_DECL
)
7230 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
7233 if (VAR_P (*tp
) && DECL_HAS_VALUE_EXPR_P (*tp
))
7235 tree x
= DECL_VALUE_EXPR (*tp
);
7236 struct replace_decls_d rd
= { vars_map
, to_context
};
7238 walk_tree (&x
, replace_block_vars_by_duplicates_1
, &rd
, NULL
);
7239 SET_DECL_VALUE_EXPR (t
, x
);
7240 DECL_HAS_VALUE_EXPR_P (t
) = 1;
7242 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
7247 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
7248 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
7251 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7255 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
7258 /* Discard it from the old loop array. */
7259 (*get_loops (fn1
))[loop
->num
] = NULL
;
7261 /* Place it in the new loop array, assigning it a new number. */
7262 loop
->num
= number_of_loops (fn2
);
7263 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
7265 /* Recurse to children. */
7266 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
7267 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
7270 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7271 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7274 verify_sese (basic_block entry
, basic_block exit
, vec
<basic_block
> *bbs_p
)
7279 bitmap bbs
= BITMAP_ALLOC (NULL
);
7282 gcc_assert (entry
!= NULL
);
7283 gcc_assert (entry
!= exit
);
7284 gcc_assert (bbs_p
!= NULL
);
7286 gcc_assert (bbs_p
->length () > 0);
7288 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
7289 bitmap_set_bit (bbs
, bb
->index
);
7291 gcc_assert (bitmap_bit_p (bbs
, entry
->index
));
7292 gcc_assert (exit
== NULL
|| bitmap_bit_p (bbs
, exit
->index
));
7294 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
7298 gcc_assert (single_pred_p (entry
));
7299 gcc_assert (!bitmap_bit_p (bbs
, single_pred (entry
)->index
));
7302 for (ei
= ei_start (bb
->preds
); !ei_end_p (ei
); ei_next (&ei
))
7305 gcc_assert (bitmap_bit_p (bbs
, e
->src
->index
));
7310 gcc_assert (single_succ_p (exit
));
7311 gcc_assert (!bitmap_bit_p (bbs
, single_succ (exit
)->index
));
7314 for (ei
= ei_start (bb
->succs
); !ei_end_p (ei
); ei_next (&ei
))
7317 gcc_assert (bitmap_bit_p (bbs
, e
->dest
->index
));
7324 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7327 gather_ssa_name_hash_map_from (tree
const &from
, tree
const &, void *data
)
7329 bitmap release_names
= (bitmap
)data
;
7331 if (TREE_CODE (from
) != SSA_NAME
)
7334 bitmap_set_bit (release_names
, SSA_NAME_VERSION (from
));
7338 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7339 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7340 single basic block in the original CFG and the new basic block is
7341 returned. DEST_CFUN must not have a CFG yet.
7343 Note that the region need not be a pure SESE region. Blocks inside
7344 the region may contain calls to abort/exit. The only restriction
7345 is that ENTRY_BB should be the only entry point and it must
7348 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7349 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7350 to the new function.
7352 All local variables referenced in the region are assumed to be in
7353 the corresponding BLOCK_VARS and unexpanded variable lists
7354 associated with DEST_CFUN.
7356 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7357 reimplement move_sese_region_to_fn by duplicating the region rather than
7361 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
7362 basic_block exit_bb
, tree orig_block
)
7364 vec
<basic_block
> bbs
, dom_bbs
;
7365 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
7366 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
7367 struct function
*saved_cfun
= cfun
;
7368 int *entry_flag
, *exit_flag
;
7369 profile_probability
*entry_prob
, *exit_prob
;
7370 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
7373 htab_t new_label_map
;
7374 hash_map
<void *, void *> *eh_map
;
7375 struct loop
*loop
= entry_bb
->loop_father
;
7376 struct loop
*loop0
= get_loop (saved_cfun
, 0);
7377 struct move_stmt_d d
;
7379 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7381 gcc_assert (entry_bb
!= exit_bb
7383 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
7385 /* Collect all the blocks in the region. Manually add ENTRY_BB
7386 because it won't be added by dfs_enumerate_from. */
7388 bbs
.safe_push (entry_bb
);
7389 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
7392 verify_sese (entry_bb
, exit_bb
, &bbs
);
7394 /* The blocks that used to be dominated by something in BBS will now be
7395 dominated by the new block. */
7396 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
7400 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7401 the predecessor edges to ENTRY_BB and the successor edges to
7402 EXIT_BB so that we can re-attach them to the new basic block that
7403 will replace the region. */
7404 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
7405 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
7406 entry_flag
= XNEWVEC (int, num_entry_edges
);
7407 entry_prob
= XNEWVEC (profile_probability
, num_entry_edges
);
7409 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
7411 entry_prob
[i
] = e
->probability
;
7412 entry_flag
[i
] = e
->flags
;
7413 entry_pred
[i
++] = e
->src
;
7419 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
7420 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
7421 exit_flag
= XNEWVEC (int, num_exit_edges
);
7422 exit_prob
= XNEWVEC (profile_probability
, num_exit_edges
);
7424 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
7426 exit_prob
[i
] = e
->probability
;
7427 exit_flag
[i
] = e
->flags
;
7428 exit_succ
[i
++] = e
->dest
;
7440 /* Switch context to the child function to initialize DEST_FN's CFG. */
7441 gcc_assert (dest_cfun
->cfg
== NULL
);
7442 push_cfun (dest_cfun
);
7444 init_empty_tree_cfg ();
7446 /* Initialize EH information for the new function. */
7448 new_label_map
= NULL
;
7451 eh_region region
= NULL
;
7453 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7454 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
7456 init_eh_for_function ();
7459 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
7460 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
7461 new_label_mapper
, new_label_map
);
7465 /* Initialize an empty loop tree. */
7466 struct loops
*loops
= ggc_cleared_alloc
<struct loops
> ();
7467 init_loops_structure (dest_cfun
, loops
, 1);
7468 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
7469 set_loops_for_fn (dest_cfun
, loops
);
7471 vec
<loop_p
, va_gc
> *larray
= get_loops (saved_cfun
)->copy ();
7473 /* Move the outlined loop tree part. */
7474 num_nodes
= bbs
.length ();
7475 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7477 if (bb
->loop_father
->header
== bb
)
7479 struct loop
*this_loop
= bb
->loop_father
;
7480 struct loop
*outer
= loop_outer (this_loop
);
7482 /* If the SESE region contains some bbs ending with
7483 a noreturn call, those are considered to belong
7484 to the outermost loop in saved_cfun, rather than
7485 the entry_bb's loop_father. */
7489 num_nodes
-= this_loop
->num_nodes
;
7490 flow_loop_tree_node_remove (bb
->loop_father
);
7491 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
7492 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
7495 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
7498 /* Remove loop exits from the outlined region. */
7499 if (loops_for_fn (saved_cfun
)->exits
)
7500 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7502 struct loops
*l
= loops_for_fn (saved_cfun
);
7504 = l
->exits
->find_slot_with_hash (e
, htab_hash_pointer (e
),
7507 l
->exits
->clear_slot (slot
);
7512 /* Adjust the number of blocks in the tree root of the outlined part. */
7513 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
7515 /* Setup a mapping to be used by move_block_to_fn. */
7516 loop
->aux
= current_loops
->tree_root
;
7517 loop0
->aux
= current_loops
->tree_root
;
7519 /* Fix up orig_loop_num. If the block referenced in it has been moved
7520 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7522 FOR_EACH_LOOP_FN (dest_cfun
, dloop
, 0)
7523 if (dloop
->orig_loop_num
)
7525 if ((*larray
)[dloop
->orig_loop_num
] != NULL
7526 && get_loop (saved_cfun
, dloop
->orig_loop_num
) == NULL
)
7527 dloop
->orig_loop_num
= (*larray
)[dloop
->orig_loop_num
]->num
;
7529 dloop
->orig_loop_num
= 0;
7535 /* Move blocks from BBS into DEST_CFUN. */
7536 gcc_assert (bbs
.length () >= 2);
7537 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
7538 hash_map
<tree
, tree
> vars_map
;
7540 memset (&d
, 0, sizeof (d
));
7541 d
.orig_block
= orig_block
;
7542 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
7543 d
.from_context
= cfun
->decl
;
7544 d
.to_context
= dest_cfun
->decl
;
7545 d
.vars_map
= &vars_map
;
7546 d
.new_label_map
= new_label_map
;
7548 d
.remap_decls_p
= true;
7550 if (gimple_in_ssa_p (cfun
))
7551 for (tree arg
= DECL_ARGUMENTS (d
.to_context
); arg
; arg
= DECL_CHAIN (arg
))
7553 tree narg
= make_ssa_name_fn (dest_cfun
, arg
, gimple_build_nop ());
7554 set_ssa_default_def (dest_cfun
, arg
, narg
);
7555 vars_map
.put (arg
, narg
);
7558 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7560 /* No need to update edge counts on the last block. It has
7561 already been updated earlier when we detached the region from
7562 the original CFG. */
7563 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
7569 /* Loop sizes are no longer correct, fix them up. */
7570 loop
->num_nodes
-= num_nodes
;
7571 for (struct loop
*outer
= loop_outer (loop
);
7572 outer
; outer
= loop_outer (outer
))
7573 outer
->num_nodes
-= num_nodes
;
7574 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
7576 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vectorize_loops
)
7579 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
7584 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
7586 dest_cfun
->has_simduid_loops
= true;
7588 if (aloop
->force_vectorize
)
7589 dest_cfun
->has_force_vectorize_loops
= true;
7593 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7597 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7599 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7600 = BLOCK_SUBBLOCKS (orig_block
);
7601 for (block
= BLOCK_SUBBLOCKS (orig_block
);
7602 block
; block
= BLOCK_CHAIN (block
))
7603 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
7604 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
7607 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
7608 &vars_map
, dest_cfun
->decl
);
7611 htab_delete (new_label_map
);
7615 if (gimple_in_ssa_p (cfun
))
7617 /* We need to release ssa-names in a defined order, so first find them,
7618 and then iterate in ascending version order. */
7619 bitmap release_names
= BITMAP_ALLOC (NULL
);
7620 vars_map
.traverse
<void *, gather_ssa_name_hash_map_from
> (release_names
);
7623 EXECUTE_IF_SET_IN_BITMAP (release_names
, 0, i
, bi
)
7624 release_ssa_name (ssa_name (i
));
7625 BITMAP_FREE (release_names
);
7628 /* Rewire the entry and exit blocks. The successor to the entry
7629 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7630 the child function. Similarly, the predecessor of DEST_FN's
7631 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7632 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7633 various CFG manipulation function get to the right CFG.
7635 FIXME, this is silly. The CFG ought to become a parameter to
7637 push_cfun (dest_cfun
);
7638 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= entry_bb
->count
;
7639 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
7642 make_single_succ_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
7643 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
= exit_bb
->count
;
7646 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
= profile_count::zero ();
7649 /* Back in the original function, the SESE region has disappeared,
7650 create a new basic block in its place. */
7651 bb
= create_empty_bb (entry_pred
[0]);
7653 add_bb_to_loop (bb
, loop
);
7654 for (i
= 0; i
< num_entry_edges
; i
++)
7656 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
7657 e
->probability
= entry_prob
[i
];
7660 for (i
= 0; i
< num_exit_edges
; i
++)
7662 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
7663 e
->probability
= exit_prob
[i
];
7666 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
7667 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
7668 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
7685 /* Dump default def DEF to file FILE using FLAGS and indentation
7689 dump_default_def (FILE *file
, tree def
, int spc
, dump_flags_t flags
)
7691 for (int i
= 0; i
< spc
; ++i
)
7692 fprintf (file
, " ");
7693 dump_ssaname_info_to_file (file
, def
, spc
);
7695 print_generic_expr (file
, TREE_TYPE (def
), flags
);
7696 fprintf (file
, " ");
7697 print_generic_expr (file
, def
, flags
);
7698 fprintf (file
, " = ");
7699 print_generic_expr (file
, SSA_NAME_VAR (def
), flags
);
7700 fprintf (file
, ";\n");
7703 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7706 print_no_sanitize_attr_value (FILE *file
, tree value
)
7708 unsigned int flags
= tree_to_uhwi (value
);
7710 for (int i
= 0; sanitizer_opts
[i
].name
!= NULL
; ++i
)
7712 if ((sanitizer_opts
[i
].flag
& flags
) == sanitizer_opts
[i
].flag
)
7715 fprintf (file
, " | ");
7716 fprintf (file
, "%s", sanitizer_opts
[i
].name
);
7722 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7726 dump_function_to_file (tree fndecl
, FILE *file
, dump_flags_t flags
)
7728 tree arg
, var
, old_current_fndecl
= current_function_decl
;
7729 struct function
*dsf
;
7730 bool ignore_topmost_bind
= false, any_var
= false;
7733 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
7734 && decl_is_tm_clone (fndecl
));
7735 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
7737 if (DECL_ATTRIBUTES (fndecl
) != NULL_TREE
)
7739 fprintf (file
, "__attribute__((");
7743 for (chain
= DECL_ATTRIBUTES (fndecl
); chain
;
7744 first
= false, chain
= TREE_CHAIN (chain
))
7747 fprintf (file
, ", ");
7749 tree name
= get_attribute_name (chain
);
7750 print_generic_expr (file
, name
, dump_flags
);
7751 if (TREE_VALUE (chain
) != NULL_TREE
)
7753 fprintf (file
, " (");
7755 if (strstr (IDENTIFIER_POINTER (name
), "no_sanitize"))
7756 print_no_sanitize_attr_value (file
, TREE_VALUE (chain
));
7758 print_generic_expr (file
, TREE_VALUE (chain
), dump_flags
);
7759 fprintf (file
, ")");
7763 fprintf (file
, "))\n");
7766 current_function_decl
= fndecl
;
7767 if (flags
& TDF_GIMPLE
)
7769 print_generic_expr (file
, TREE_TYPE (TREE_TYPE (fndecl
)),
7770 dump_flags
| TDF_SLIM
);
7771 fprintf (file
, " __GIMPLE ()\n%s (", function_name (fun
));
7774 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
7776 arg
= DECL_ARGUMENTS (fndecl
);
7779 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
7780 fprintf (file
, " ");
7781 print_generic_expr (file
, arg
, dump_flags
);
7782 if (DECL_CHAIN (arg
))
7783 fprintf (file
, ", ");
7784 arg
= DECL_CHAIN (arg
);
7786 fprintf (file
, ")\n");
7788 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7789 if (dsf
&& (flags
& TDF_EH
))
7790 dump_eh_tree (file
, dsf
);
7792 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7794 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7795 current_function_decl
= old_current_fndecl
;
7799 /* When GIMPLE is lowered, the variables are no longer available in
7800 BIND_EXPRs, so display them separately. */
7801 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7804 ignore_topmost_bind
= true;
7806 fprintf (file
, "{\n");
7807 if (gimple_in_ssa_p (fun
)
7808 && (flags
& TDF_ALIAS
))
7810 for (arg
= DECL_ARGUMENTS (fndecl
); arg
!= NULL
;
7811 arg
= DECL_CHAIN (arg
))
7813 tree def
= ssa_default_def (fun
, arg
);
7815 dump_default_def (file
, def
, 2, flags
);
7818 tree res
= DECL_RESULT (fun
->decl
);
7819 if (res
!= NULL_TREE
7820 && DECL_BY_REFERENCE (res
))
7822 tree def
= ssa_default_def (fun
, res
);
7824 dump_default_def (file
, def
, 2, flags
);
7827 tree static_chain
= fun
->static_chain_decl
;
7828 if (static_chain
!= NULL_TREE
)
7830 tree def
= ssa_default_def (fun
, static_chain
);
7832 dump_default_def (file
, def
, 2, flags
);
7836 if (!vec_safe_is_empty (fun
->local_decls
))
7837 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7839 print_generic_decl (file
, var
, flags
);
7840 fprintf (file
, "\n");
7847 if (gimple_in_ssa_p (cfun
))
7848 FOR_EACH_SSA_NAME (ix
, name
, cfun
)
7850 if (!SSA_NAME_VAR (name
))
7852 fprintf (file
, " ");
7853 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7854 fprintf (file
, " ");
7855 print_generic_expr (file
, name
, flags
);
7856 fprintf (file
, ";\n");
7863 if (fun
&& fun
->decl
== fndecl
7865 && basic_block_info_for_fn (fun
))
7867 /* If the CFG has been built, emit a CFG-based dump. */
7868 if (!ignore_topmost_bind
)
7869 fprintf (file
, "{\n");
7871 if (any_var
&& n_basic_blocks_for_fn (fun
))
7872 fprintf (file
, "\n");
7874 FOR_EACH_BB_FN (bb
, fun
)
7875 dump_bb (file
, bb
, 2, flags
);
7877 fprintf (file
, "}\n");
7879 else if (fun
->curr_properties
& PROP_gimple_any
)
7881 /* The function is now in GIMPLE form but the CFG has not been
7882 built yet. Emit the single sequence of GIMPLE statements
7883 that make up its body. */
7884 gimple_seq body
= gimple_body (fndecl
);
7886 if (gimple_seq_first_stmt (body
)
7887 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7888 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7889 print_gimple_seq (file
, body
, 0, flags
);
7892 if (!ignore_topmost_bind
)
7893 fprintf (file
, "{\n");
7896 fprintf (file
, "\n");
7898 print_gimple_seq (file
, body
, 2, flags
);
7899 fprintf (file
, "}\n");
7906 /* Make a tree based dump. */
7907 chain
= DECL_SAVED_TREE (fndecl
);
7908 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7910 if (ignore_topmost_bind
)
7912 chain
= BIND_EXPR_BODY (chain
);
7920 if (!ignore_topmost_bind
)
7922 fprintf (file
, "{\n");
7923 /* No topmost bind, pretend it's ignored for later. */
7924 ignore_topmost_bind
= true;
7930 fprintf (file
, "\n");
7932 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7933 if (ignore_topmost_bind
)
7934 fprintf (file
, "}\n");
7937 if (flags
& TDF_ENUMERATE_LOCALS
)
7938 dump_enumerated_decls (file
, flags
);
7939 fprintf (file
, "\n\n");
7941 current_function_decl
= old_current_fndecl
;
7944 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7947 debug_function (tree fn
, dump_flags_t flags
)
7949 dump_function_to_file (fn
, stderr
, flags
);
7953 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7956 print_pred_bbs (FILE *file
, basic_block bb
)
7961 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7962 fprintf (file
, "bb_%d ", e
->src
->index
);
7966 /* Print on FILE the indexes for the successors of basic_block BB. */
7969 print_succ_bbs (FILE *file
, basic_block bb
)
7974 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7975 fprintf (file
, "bb_%d ", e
->dest
->index
);
7978 /* Print to FILE the basic block BB following the VERBOSITY level. */
7981 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7983 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7984 memset ((void *) s_indent
, ' ', (size_t) indent
);
7985 s_indent
[indent
] = '\0';
7987 /* Print basic_block's header. */
7990 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7991 print_pred_bbs (file
, bb
);
7992 fprintf (file
, "}, succs = {");
7993 print_succ_bbs (file
, bb
);
7994 fprintf (file
, "})\n");
7997 /* Print basic_block's body. */
8000 fprintf (file
, "%s {\n", s_indent
);
8001 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
8002 fprintf (file
, "%s }\n", s_indent
);
8006 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
8008 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8009 VERBOSITY level this outputs the contents of the loop, or just its
8013 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
8021 s_indent
= (char *) alloca ((size_t) indent
+ 1);
8022 memset ((void *) s_indent
, ' ', (size_t) indent
);
8023 s_indent
[indent
] = '\0';
8025 /* Print loop's header. */
8026 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
8028 fprintf (file
, "header = %d", loop
->header
->index
);
8031 fprintf (file
, "deleted)\n");
8035 fprintf (file
, ", latch = %d", loop
->latch
->index
);
8037 fprintf (file
, ", multiple latches");
8038 fprintf (file
, ", niter = ");
8039 print_generic_expr (file
, loop
->nb_iterations
);
8041 if (loop
->any_upper_bound
)
8043 fprintf (file
, ", upper_bound = ");
8044 print_decu (loop
->nb_iterations_upper_bound
, file
);
8046 if (loop
->any_likely_upper_bound
)
8048 fprintf (file
, ", likely_upper_bound = ");
8049 print_decu (loop
->nb_iterations_likely_upper_bound
, file
);
8052 if (loop
->any_estimate
)
8054 fprintf (file
, ", estimate = ");
8055 print_decu (loop
->nb_iterations_estimate
, file
);
8058 fprintf (file
, ", unroll = %d", loop
->unroll
);
8059 fprintf (file
, ")\n");
8061 /* Print loop's body. */
8064 fprintf (file
, "%s{\n", s_indent
);
8065 FOR_EACH_BB_FN (bb
, cfun
)
8066 if (bb
->loop_father
== loop
)
8067 print_loops_bb (file
, bb
, indent
, verbosity
);
8069 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
8070 fprintf (file
, "%s}\n", s_indent
);
8074 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8075 spaces. Following VERBOSITY level this outputs the contents of the
8076 loop, or just its structure. */
8079 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
8085 print_loop (file
, loop
, indent
, verbosity
);
8086 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
8089 /* Follow a CFG edge from the entry point of the program, and on entry
8090 of a loop, pretty print the loop structure on FILE. */
8093 print_loops (FILE *file
, int verbosity
)
8097 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
8098 fprintf (file
, "\nLoops in function: %s\n", current_function_name ());
8099 if (bb
&& bb
->loop_father
)
8100 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
8106 debug (struct loop
&ref
)
8108 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
8112 debug (struct loop
*ptr
)
8117 fprintf (stderr
, "<nil>\n");
8120 /* Dump a loop verbosely. */
8123 debug_verbose (struct loop
&ref
)
8125 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
8129 debug_verbose (struct loop
*ptr
)
8134 fprintf (stderr
, "<nil>\n");
8138 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8141 debug_loops (int verbosity
)
8143 print_loops (stderr
, verbosity
);
8146 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8149 debug_loop (struct loop
*loop
, int verbosity
)
8151 print_loop (stderr
, loop
, 0, verbosity
);
8154 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8158 debug_loop_num (unsigned num
, int verbosity
)
8160 debug_loop (get_loop (cfun
, num
), verbosity
);
8163 /* Return true if BB ends with a call, possibly followed by some
8164 instructions that must stay with the call. Return false,
8168 gimple_block_ends_with_call_p (basic_block bb
)
8170 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
8171 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
8175 /* Return true if BB ends with a conditional branch. Return false,
8179 gimple_block_ends_with_condjump_p (const_basic_block bb
)
8181 gimple
*stmt
= last_stmt (CONST_CAST_BB (bb
));
8182 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
8186 /* Return true if statement T may terminate execution of BB in ways not
8187 explicitly represtented in the CFG. */
8190 stmt_can_terminate_bb_p (gimple
*t
)
8192 tree fndecl
= NULL_TREE
;
8195 /* Eh exception not handled internally terminates execution of the whole
8197 if (stmt_can_throw_external (t
))
8200 /* NORETURN and LONGJMP calls already have an edge to exit.
8201 CONST and PURE calls do not need one.
8202 We don't currently check for CONST and PURE here, although
8203 it would be a good idea, because those attributes are
8204 figured out from the RTL in mark_constant_function, and
8205 the counter incrementation code from -fprofile-arcs
8206 leads to different results from -fbranch-probabilities. */
8207 if (is_gimple_call (t
))
8209 fndecl
= gimple_call_fndecl (t
);
8210 call_flags
= gimple_call_flags (t
);
8213 if (is_gimple_call (t
)
8215 && DECL_BUILT_IN (fndecl
)
8216 && (call_flags
& ECF_NOTHROW
)
8217 && !(call_flags
& ECF_RETURNS_TWICE
)
8218 /* fork() doesn't really return twice, but the effect of
8219 wrapping it in __gcov_fork() which calls __gcov_flush()
8220 and clears the counters before forking has the same
8221 effect as returning twice. Force a fake edge. */
8222 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
8223 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
8226 if (is_gimple_call (t
))
8232 if (call_flags
& (ECF_PURE
| ECF_CONST
)
8233 && !(call_flags
& ECF_LOOPING_CONST_OR_PURE
))
8236 /* Function call may do longjmp, terminate program or do other things.
8237 Special case noreturn that have non-abnormal edges out as in this case
8238 the fact is sufficiently represented by lack of edges out of T. */
8239 if (!(call_flags
& ECF_NORETURN
))
8243 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8244 if ((e
->flags
& EDGE_FAKE
) == 0)
8248 if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (t
))
8249 if (gimple_asm_volatile_p (asm_stmt
) || gimple_asm_input_p (asm_stmt
))
8256 /* Add fake edges to the function exit for any non constant and non
8257 noreturn calls (or noreturn calls with EH/abnormal edges),
8258 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8259 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8262 The goal is to expose cases in which entering a basic block does
8263 not imply that all subsequent instructions must be executed. */
8266 gimple_flow_call_edges_add (sbitmap blocks
)
8269 int blocks_split
= 0;
8270 int last_bb
= last_basic_block_for_fn (cfun
);
8271 bool check_last_block
= false;
8273 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
8277 check_last_block
= true;
8279 check_last_block
= bitmap_bit_p (blocks
,
8280 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
8282 /* In the last basic block, before epilogue generation, there will be
8283 a fallthru edge to EXIT. Special care is required if the last insn
8284 of the last basic block is a call because make_edge folds duplicate
8285 edges, which would result in the fallthru edge also being marked
8286 fake, which would result in the fallthru edge being removed by
8287 remove_fake_edges, which would result in an invalid CFG.
8289 Moreover, we can't elide the outgoing fake edge, since the block
8290 profiler needs to take this into account in order to solve the minimal
8291 spanning tree in the case that the call doesn't return.
8293 Handle this by adding a dummy instruction in a new last basic block. */
8294 if (check_last_block
)
8296 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
8297 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
8300 if (!gsi_end_p (gsi
))
8303 if (t
&& stmt_can_terminate_bb_p (t
))
8307 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8310 gsi_insert_on_edge (e
, gimple_build_nop ());
8311 gsi_commit_edge_inserts ();
8316 /* Now add fake edges to the function exit for any non constant
8317 calls since there is no way that we can determine if they will
8319 for (i
= 0; i
< last_bb
; i
++)
8321 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8322 gimple_stmt_iterator gsi
;
8323 gimple
*stmt
, *last_stmt
;
8328 if (blocks
&& !bitmap_bit_p (blocks
, i
))
8331 gsi
= gsi_last_nondebug_bb (bb
);
8332 if (!gsi_end_p (gsi
))
8334 last_stmt
= gsi_stmt (gsi
);
8337 stmt
= gsi_stmt (gsi
);
8338 if (stmt_can_terminate_bb_p (stmt
))
8342 /* The handling above of the final block before the
8343 epilogue should be enough to verify that there is
8344 no edge to the exit block in CFG already.
8345 Calling make_edge in such case would cause us to
8346 mark that edge as fake and remove it later. */
8347 if (flag_checking
&& stmt
== last_stmt
)
8349 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8350 gcc_assert (e
== NULL
);
8353 /* Note that the following may create a new basic block
8354 and renumber the existing basic blocks. */
8355 if (stmt
!= last_stmt
)
8357 e
= split_block (bb
, stmt
);
8361 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
8362 e
->probability
= profile_probability::guessed_never ();
8366 while (!gsi_end_p (gsi
));
8371 checking_verify_flow_info ();
8373 return blocks_split
;
8376 /* Removes edge E and all the blocks dominated by it, and updates dominance
8377 information. The IL in E->src needs to be updated separately.
8378 If dominance info is not available, only the edge E is removed.*/
8381 remove_edge_and_dominated_blocks (edge e
)
8383 vec
<basic_block
> bbs_to_remove
= vNULL
;
8384 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
8387 bool none_removed
= false;
8389 basic_block bb
, dbb
;
8392 /* If we are removing a path inside a non-root loop that may change
8393 loop ownership of blocks or remove loops. Mark loops for fixup. */
8395 && loop_outer (e
->src
->loop_father
) != NULL
8396 && e
->src
->loop_father
== e
->dest
->loop_father
)
8397 loops_state_set (LOOPS_NEED_FIXUP
);
8399 if (!dom_info_available_p (CDI_DOMINATORS
))
8405 /* No updating is needed for edges to exit. */
8406 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8408 if (cfgcleanup_altered_bbs
)
8409 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
8414 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8415 that is not dominated by E->dest, then this set is empty. Otherwise,
8416 all the basic blocks dominated by E->dest are removed.
8418 Also, to DF_IDOM we store the immediate dominators of the blocks in
8419 the dominance frontier of E (i.e., of the successors of the
8420 removed blocks, if there are any, and of E->dest otherwise). */
8421 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
8426 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
8428 none_removed
= true;
8433 auto_bitmap df
, df_idom
;
8435 bitmap_set_bit (df_idom
,
8436 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
8439 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
8440 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
8442 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
8444 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
8445 bitmap_set_bit (df
, f
->dest
->index
);
8448 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
8449 bitmap_clear_bit (df
, bb
->index
);
8451 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
8453 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8454 bitmap_set_bit (df_idom
,
8455 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
8459 if (cfgcleanup_altered_bbs
)
8461 /* Record the set of the altered basic blocks. */
8462 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
8463 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
8466 /* Remove E and the cancelled blocks. */
8471 /* Walk backwards so as to get a chance to substitute all
8472 released DEFs into debug stmts. See
8473 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8475 for (i
= bbs_to_remove
.length (); i
-- > 0; )
8476 delete_basic_block (bbs_to_remove
[i
]);
8479 /* Update the dominance information. The immediate dominator may change only
8480 for blocks whose immediate dominator belongs to DF_IDOM:
8482 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8483 removal. Let Z the arbitrary block such that idom(Z) = Y and
8484 Z dominates X after the removal. Before removal, there exists a path P
8485 from Y to X that avoids Z. Let F be the last edge on P that is
8486 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8487 dominates W, and because of P, Z does not dominate W), and W belongs to
8488 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8489 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
8491 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8492 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
8494 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
8495 bbs_to_fix_dom
.safe_push (dbb
);
8498 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
8500 bbs_to_remove
.release ();
8501 bbs_to_fix_dom
.release ();
8504 /* Purge dead EH edges from basic block BB. */
8507 gimple_purge_dead_eh_edges (basic_block bb
)
8509 bool changed
= false;
8512 gimple
*stmt
= last_stmt (bb
);
8514 if (stmt
&& stmt_can_throw_internal (stmt
))
8517 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8519 if (e
->flags
& EDGE_EH
)
8521 remove_edge_and_dominated_blocks (e
);
8531 /* Purge dead EH edges from basic block listed in BLOCKS. */
8534 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
8536 bool changed
= false;
8540 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8542 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8544 /* Earlier gimple_purge_dead_eh_edges could have removed
8545 this basic block already. */
8546 gcc_assert (bb
|| changed
);
8548 changed
|= gimple_purge_dead_eh_edges (bb
);
8554 /* Purge dead abnormal call edges from basic block BB. */
8557 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
8559 bool changed
= false;
8562 gimple
*stmt
= last_stmt (bb
);
8564 if (!cfun
->has_nonlocal_label
8565 && !cfun
->calls_setjmp
)
8568 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
8571 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8573 if (e
->flags
& EDGE_ABNORMAL
)
8575 if (e
->flags
& EDGE_FALLTHRU
)
8576 e
->flags
&= ~EDGE_ABNORMAL
;
8578 remove_edge_and_dominated_blocks (e
);
8588 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8591 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
8593 bool changed
= false;
8597 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8599 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8601 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8602 this basic block already. */
8603 gcc_assert (bb
|| changed
);
8605 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
8611 /* This function is called whenever a new edge is created or
8615 gimple_execute_on_growing_pred (edge e
)
8617 basic_block bb
= e
->dest
;
8619 if (!gimple_seq_empty_p (phi_nodes (bb
)))
8620 reserve_phi_args_for_new_edge (bb
);
8623 /* This function is called immediately before edge E is removed from
8624 the edge vector E->dest->preds. */
8627 gimple_execute_on_shrinking_pred (edge e
)
8629 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
8630 remove_phi_args (e
);
8633 /*---------------------------------------------------------------------------
8634 Helper functions for Loop versioning
8635 ---------------------------------------------------------------------------*/
8637 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8638 of 'first'. Both of them are dominated by 'new_head' basic block. When
8639 'new_head' was created by 'second's incoming edge it received phi arguments
8640 on the edge by split_edge(). Later, additional edge 'e' was created to
8641 connect 'new_head' and 'first'. Now this routine adds phi args on this
8642 additional edge 'e' that new_head to second edge received as part of edge
8646 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
8647 basic_block new_head
, edge e
)
8650 gphi_iterator psi1
, psi2
;
8652 edge e2
= find_edge (new_head
, second
);
8654 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8655 edge, we should always have an edge from NEW_HEAD to SECOND. */
8656 gcc_assert (e2
!= NULL
);
8658 /* Browse all 'second' basic block phi nodes and add phi args to
8659 edge 'e' for 'first' head. PHI args are always in correct order. */
8661 for (psi2
= gsi_start_phis (second
),
8662 psi1
= gsi_start_phis (first
);
8663 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
8664 gsi_next (&psi2
), gsi_next (&psi1
))
8668 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
8669 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
8674 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8675 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8676 the destination of the ELSE part. */
8679 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
8680 basic_block second_head ATTRIBUTE_UNUSED
,
8681 basic_block cond_bb
, void *cond_e
)
8683 gimple_stmt_iterator gsi
;
8684 gimple
*new_cond_expr
;
8685 tree cond_expr
= (tree
) cond_e
;
8688 /* Build new conditional expr */
8689 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
8690 NULL_TREE
, NULL_TREE
);
8692 /* Add new cond in cond_bb. */
8693 gsi
= gsi_last_bb (cond_bb
);
8694 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
8696 /* Adjust edges appropriately to connect new head with first head
8697 as well as second head. */
8698 e0
= single_succ_edge (cond_bb
);
8699 e0
->flags
&= ~EDGE_FALLTHRU
;
8700 e0
->flags
|= EDGE_FALSE_VALUE
;
8704 /* Do book-keeping of basic block BB for the profile consistency checker.
8705 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8706 then do post-pass accounting. Store the counting in RECORD. */
8708 gimple_account_profile_record (basic_block bb
, int after_pass
,
8709 struct profile_record
*record
)
8711 gimple_stmt_iterator i
;
8712 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
8714 record
->size
[after_pass
]
8715 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
8716 if (bb
->count
.initialized_p ())
8717 record
->time
[after_pass
]
8718 += estimate_num_insns (gsi_stmt (i
),
8719 &eni_time_weights
) * bb
->count
.to_gcov_type ();
8720 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
8721 record
->time
[after_pass
]
8722 += estimate_num_insns (gsi_stmt (i
),
8723 &eni_time_weights
) * bb
->count
.to_frequency (cfun
);
8727 struct cfg_hooks gimple_cfg_hooks
= {
8729 gimple_verify_flow_info
,
8730 gimple_dump_bb
, /* dump_bb */
8731 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
8732 create_bb
, /* create_basic_block */
8733 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
8734 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
8735 gimple_can_remove_branch_p
, /* can_remove_branch_p */
8736 remove_bb
, /* delete_basic_block */
8737 gimple_split_block
, /* split_block */
8738 gimple_move_block_after
, /* move_block_after */
8739 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
8740 gimple_merge_blocks
, /* merge_blocks */
8741 gimple_predict_edge
, /* predict_edge */
8742 gimple_predicted_by_p
, /* predicted_by_p */
8743 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
8744 gimple_duplicate_bb
, /* duplicate_block */
8745 gimple_split_edge
, /* split_edge */
8746 gimple_make_forwarder_block
, /* make_forward_block */
8747 NULL
, /* tidy_fallthru_edge */
8748 NULL
, /* force_nonfallthru */
8749 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
8750 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
8751 gimple_flow_call_edges_add
, /* flow_call_edges_add */
8752 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
8753 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
8754 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
8755 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
8756 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
8757 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
8758 flush_pending_stmts
, /* flush_pending_stmts */
8759 gimple_empty_block_p
, /* block_empty_p */
8760 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
8761 gimple_account_profile_record
,
8765 /* Split all critical edges. */
8768 split_critical_edges (void)
8774 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8775 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8776 mappings around the calls to split_edge. */
8777 start_recording_case_labels ();
8778 FOR_ALL_BB_FN (bb
, cfun
)
8780 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8782 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
8784 /* PRE inserts statements to edges and expects that
8785 since split_critical_edges was done beforehand, committing edge
8786 insertions will not split more edges. In addition to critical
8787 edges we must split edges that have multiple successors and
8788 end by control flow statements, such as RESX.
8789 Go ahead and split them too. This matches the logic in
8790 gimple_find_edge_insert_loc. */
8791 else if ((!single_pred_p (e
->dest
)
8792 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
8793 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8794 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
8795 && !(e
->flags
& EDGE_ABNORMAL
))
8797 gimple_stmt_iterator gsi
;
8799 gsi
= gsi_last_bb (e
->src
);
8800 if (!gsi_end_p (gsi
)
8801 && stmt_ends_bb_p (gsi_stmt (gsi
))
8802 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
8803 && !gimple_call_builtin_p (gsi_stmt (gsi
),
8809 end_recording_case_labels ();
8815 const pass_data pass_data_split_crit_edges
=
8817 GIMPLE_PASS
, /* type */
8818 "crited", /* name */
8819 OPTGROUP_NONE
, /* optinfo_flags */
8820 TV_TREE_SPLIT_EDGES
, /* tv_id */
8821 PROP_cfg
, /* properties_required */
8822 PROP_no_crit_edges
, /* properties_provided */
8823 0, /* properties_destroyed */
8824 0, /* todo_flags_start */
8825 0, /* todo_flags_finish */
8828 class pass_split_crit_edges
: public gimple_opt_pass
8831 pass_split_crit_edges (gcc::context
*ctxt
)
8832 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
8835 /* opt_pass methods: */
8836 virtual unsigned int execute (function
*) { return split_critical_edges (); }
8838 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8839 }; // class pass_split_crit_edges
8844 make_pass_split_crit_edges (gcc::context
*ctxt
)
8846 return new pass_split_crit_edges (ctxt
);
8850 /* Insert COND expression which is GIMPLE_COND after STMT
8851 in basic block BB with appropriate basic block split
8852 and creation of a new conditionally executed basic block.
8853 Update profile so the new bb is visited with probability PROB.
8854 Return created basic block. */
8856 insert_cond_bb (basic_block bb
, gimple
*stmt
, gimple
*cond
,
8857 profile_probability prob
)
8859 edge fall
= split_block (bb
, stmt
);
8860 gimple_stmt_iterator iter
= gsi_last_bb (bb
);
8863 /* Insert cond statement. */
8864 gcc_assert (gimple_code (cond
) == GIMPLE_COND
);
8865 if (gsi_end_p (iter
))
8866 gsi_insert_before (&iter
, cond
, GSI_CONTINUE_LINKING
);
8868 gsi_insert_after (&iter
, cond
, GSI_CONTINUE_LINKING
);
8870 /* Create conditionally executed block. */
8871 new_bb
= create_empty_bb (bb
);
8872 edge e
= make_edge (bb
, new_bb
, EDGE_TRUE_VALUE
);
8873 e
->probability
= prob
;
8874 new_bb
->count
= e
->count ();
8875 make_single_succ_edge (new_bb
, fall
->dest
, EDGE_FALLTHRU
);
8877 /* Fix edge for split bb. */
8878 fall
->flags
= EDGE_FALSE_VALUE
;
8879 fall
->probability
-= e
->probability
;
8881 /* Update dominance info. */
8882 if (dom_info_available_p (CDI_DOMINATORS
))
8884 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, bb
);
8885 set_immediate_dominator (CDI_DOMINATORS
, fall
->dest
, bb
);
8888 /* Update loop info. */
8890 add_bb_to_loop (new_bb
, bb
->loop_father
);
8895 /* Build a ternary operation and gimplify it. Emit code before GSI.
8896 Return the gimple_val holding the result. */
8899 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8900 tree type
, tree a
, tree b
, tree c
)
8903 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8905 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8908 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8912 /* Build a binary operation and gimplify it. Emit code before GSI.
8913 Return the gimple_val holding the result. */
8916 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8917 tree type
, tree a
, tree b
)
8921 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8924 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8928 /* Build a unary operation and gimplify it. Emit code before GSI.
8929 Return the gimple_val holding the result. */
8932 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8937 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8940 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8946 /* Given a basic block B which ends with a conditional and has
8947 precisely two successors, determine which of the edges is taken if
8948 the conditional is true and which is taken if the conditional is
8949 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8952 extract_true_false_edges_from_block (basic_block b
,
8956 edge e
= EDGE_SUCC (b
, 0);
8958 if (e
->flags
& EDGE_TRUE_VALUE
)
8961 *false_edge
= EDGE_SUCC (b
, 1);
8966 *true_edge
= EDGE_SUCC (b
, 1);
8971 /* From a controlling predicate in the immediate dominator DOM of
8972 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8973 predicate evaluates to true and false and store them to
8974 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8975 they are non-NULL. Returns true if the edges can be determined,
8976 else return false. */
8979 extract_true_false_controlled_edges (basic_block dom
, basic_block phiblock
,
8980 edge
*true_controlled_edge
,
8981 edge
*false_controlled_edge
)
8983 basic_block bb
= phiblock
;
8984 edge true_edge
, false_edge
, tem
;
8985 edge e0
= NULL
, e1
= NULL
;
8987 /* We have to verify that one edge into the PHI node is dominated
8988 by the true edge of the predicate block and the other edge
8989 dominated by the false edge. This ensures that the PHI argument
8990 we are going to take is completely determined by the path we
8991 take from the predicate block.
8992 We can only use BB dominance checks below if the destination of
8993 the true/false edges are dominated by their edge, thus only
8994 have a single predecessor. */
8995 extract_true_false_edges_from_block (dom
, &true_edge
, &false_edge
);
8996 tem
= EDGE_PRED (bb
, 0);
8997 if (tem
== true_edge
8998 || (single_pred_p (true_edge
->dest
)
8999 && (tem
->src
== true_edge
->dest
9000 || dominated_by_p (CDI_DOMINATORS
,
9001 tem
->src
, true_edge
->dest
))))
9003 else if (tem
== false_edge
9004 || (single_pred_p (false_edge
->dest
)
9005 && (tem
->src
== false_edge
->dest
9006 || dominated_by_p (CDI_DOMINATORS
,
9007 tem
->src
, false_edge
->dest
))))
9011 tem
= EDGE_PRED (bb
, 1);
9012 if (tem
== true_edge
9013 || (single_pred_p (true_edge
->dest
)
9014 && (tem
->src
== true_edge
->dest
9015 || dominated_by_p (CDI_DOMINATORS
,
9016 tem
->src
, true_edge
->dest
))))
9018 else if (tem
== false_edge
9019 || (single_pred_p (false_edge
->dest
)
9020 && (tem
->src
== false_edge
->dest
9021 || dominated_by_p (CDI_DOMINATORS
,
9022 tem
->src
, false_edge
->dest
))))
9029 if (true_controlled_edge
)
9030 *true_controlled_edge
= e0
;
9031 if (false_controlled_edge
)
9032 *false_controlled_edge
= e1
;
9037 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9038 range [low, high]. Place associated stmts before *GSI. */
9041 generate_range_test (basic_block bb
, tree index
, tree low
, tree high
,
9042 tree
*lhs
, tree
*rhs
)
9044 tree type
= TREE_TYPE (index
);
9045 tree utype
= unsigned_type_for (type
);
9047 low
= fold_convert (type
, low
);
9048 high
= fold_convert (type
, high
);
9050 tree tmp
= make_ssa_name (type
);
9052 = gimple_build_assign (tmp
, MINUS_EXPR
, index
, low
);
9054 *lhs
= make_ssa_name (utype
);
9055 gassign
*a
= gimple_build_assign (*lhs
, NOP_EXPR
, tmp
);
9057 *rhs
= fold_build2 (MINUS_EXPR
, utype
, high
, low
);
9058 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
9059 gsi_insert_before (&gsi
, sub1
, GSI_SAME_STMT
);
9060 gsi_insert_before (&gsi
, a
, GSI_SAME_STMT
);
9063 /* Emit return warnings. */
9067 const pass_data pass_data_warn_function_return
=
9069 GIMPLE_PASS
, /* type */
9070 "*warn_function_return", /* name */
9071 OPTGROUP_NONE
, /* optinfo_flags */
9072 TV_NONE
, /* tv_id */
9073 PROP_cfg
, /* properties_required */
9074 0, /* properties_provided */
9075 0, /* properties_destroyed */
9076 0, /* todo_flags_start */
9077 0, /* todo_flags_finish */
9080 class pass_warn_function_return
: public gimple_opt_pass
9083 pass_warn_function_return (gcc::context
*ctxt
)
9084 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
9087 /* opt_pass methods: */
9088 virtual unsigned int execute (function
*);
9090 }; // class pass_warn_function_return
9093 pass_warn_function_return::execute (function
*fun
)
9095 source_location location
;
9100 if (!targetm
.warn_func_return (fun
->decl
))
9103 /* If we have a path to EXIT, then we do return. */
9104 if (TREE_THIS_VOLATILE (fun
->decl
)
9105 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
9107 location
= UNKNOWN_LOCATION
;
9108 for (ei
= ei_start (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
);
9109 (e
= ei_safe_edge (ei
)); )
9111 last
= last_stmt (e
->src
);
9112 if ((gimple_code (last
) == GIMPLE_RETURN
9113 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
9114 && location
== UNKNOWN_LOCATION
9115 && ((location
= LOCATION_LOCUS (gimple_location (last
)))
9116 != UNKNOWN_LOCATION
)
9119 /* When optimizing, replace return stmts in noreturn functions
9120 with __builtin_unreachable () call. */
9121 if (optimize
&& gimple_code (last
) == GIMPLE_RETURN
)
9123 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
9124 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
9125 gimple_set_location (new_stmt
, gimple_location (last
));
9126 gimple_stmt_iterator gsi
= gsi_for_stmt (last
);
9127 gsi_replace (&gsi
, new_stmt
, true);
9133 if (location
== UNKNOWN_LOCATION
)
9134 location
= cfun
->function_end_locus
;
9135 warning_at (location
, 0, "%<noreturn%> function does return");
9138 /* If we see "return;" in some basic block, then we do reach the end
9139 without returning a value. */
9140 else if (warn_return_type
> 0
9141 && !TREE_NO_WARNING (fun
->decl
)
9142 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
9144 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
9146 gimple
*last
= last_stmt (e
->src
);
9147 greturn
*return_stmt
= dyn_cast
<greturn
*> (last
);
9149 && gimple_return_retval (return_stmt
) == NULL
9150 && !gimple_no_warning_p (last
))
9152 location
= gimple_location (last
);
9153 if (LOCATION_LOCUS (location
) == UNKNOWN_LOCATION
)
9154 location
= fun
->function_end_locus
;
9155 warning_at (location
, OPT_Wreturn_type
,
9156 "control reaches end of non-void function");
9157 TREE_NO_WARNING (fun
->decl
) = 1;
9161 /* The C++ FE turns fallthrough from the end of non-void function
9162 into __builtin_unreachable () call with BUILTINS_LOCATION.
9163 Recognize those too. */
9165 if (!TREE_NO_WARNING (fun
->decl
))
9166 FOR_EACH_BB_FN (bb
, fun
)
9167 if (EDGE_COUNT (bb
->succs
) == 0)
9169 gimple
*last
= last_stmt (bb
);
9170 const enum built_in_function ubsan_missing_ret
9171 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN
;
9173 && ((LOCATION_LOCUS (gimple_location (last
))
9174 == BUILTINS_LOCATION
9175 && gimple_call_builtin_p (last
, BUILT_IN_UNREACHABLE
))
9176 || gimple_call_builtin_p (last
, ubsan_missing_ret
)))
9178 gimple_stmt_iterator gsi
= gsi_for_stmt (last
);
9179 gsi_prev_nondebug (&gsi
);
9180 gimple
*prev
= gsi_stmt (gsi
);
9182 location
= UNKNOWN_LOCATION
;
9184 location
= gimple_location (prev
);
9185 if (LOCATION_LOCUS (location
) == UNKNOWN_LOCATION
)
9186 location
= fun
->function_end_locus
;
9187 warning_at (location
, OPT_Wreturn_type
,
9188 "control reaches end of non-void function");
9189 TREE_NO_WARNING (fun
->decl
) = 1;
9200 make_pass_warn_function_return (gcc::context
*ctxt
)
9202 return new pass_warn_function_return (ctxt
);
9205 /* Walk a gimplified function and warn for functions whose return value is
9206 ignored and attribute((warn_unused_result)) is set. This is done before
9207 inlining, so we don't have to worry about that. */
9210 do_warn_unused_result (gimple_seq seq
)
9213 gimple_stmt_iterator i
;
9215 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
9217 gimple
*g
= gsi_stmt (i
);
9219 switch (gimple_code (g
))
9222 do_warn_unused_result (gimple_bind_body (as_a
<gbind
*>(g
)));
9225 do_warn_unused_result (gimple_try_eval (g
));
9226 do_warn_unused_result (gimple_try_cleanup (g
));
9229 do_warn_unused_result (gimple_catch_handler (
9230 as_a
<gcatch
*> (g
)));
9232 case GIMPLE_EH_FILTER
:
9233 do_warn_unused_result (gimple_eh_filter_failure (g
));
9237 if (gimple_call_lhs (g
))
9239 if (gimple_call_internal_p (g
))
9242 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9243 LHS. All calls whose value is ignored should be
9244 represented like this. Look for the attribute. */
9245 fdecl
= gimple_call_fndecl (g
);
9246 ftype
= gimple_call_fntype (g
);
9248 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
9250 location_t loc
= gimple_location (g
);
9253 warning_at (loc
, OPT_Wunused_result
,
9254 "ignoring return value of %qD, "
9255 "declared with attribute warn_unused_result",
9258 warning_at (loc
, OPT_Wunused_result
,
9259 "ignoring return value of function "
9260 "declared with attribute warn_unused_result");
9265 /* Not a container, not a call, or a call whose value is used. */
9273 const pass_data pass_data_warn_unused_result
=
9275 GIMPLE_PASS
, /* type */
9276 "*warn_unused_result", /* name */
9277 OPTGROUP_NONE
, /* optinfo_flags */
9278 TV_NONE
, /* tv_id */
9279 PROP_gimple_any
, /* properties_required */
9280 0, /* properties_provided */
9281 0, /* properties_destroyed */
9282 0, /* todo_flags_start */
9283 0, /* todo_flags_finish */
9286 class pass_warn_unused_result
: public gimple_opt_pass
9289 pass_warn_unused_result (gcc::context
*ctxt
)
9290 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
9293 /* opt_pass methods: */
9294 virtual bool gate (function
*) { return flag_warn_unused_result
; }
9295 virtual unsigned int execute (function
*)
9297 do_warn_unused_result (gimple_body (current_function_decl
));
9301 }; // class pass_warn_unused_result
9306 make_pass_warn_unused_result (gcc::context
*ctxt
)
9308 return new pass_warn_unused_result (ctxt
);
9311 /* IPA passes, compilation of earlier functions or inlining
9312 might have changed some properties, such as marked functions nothrow,
9313 pure, const or noreturn.
9314 Remove redundant edges and basic blocks, and create new ones if necessary.
9316 This pass can't be executed as stand alone pass from pass manager, because
9317 in between inlining and this fixup the verify_flow_info would fail. */
9320 execute_fixup_cfg (void)
9323 gimple_stmt_iterator gsi
;
9325 cgraph_node
*node
= cgraph_node::get (current_function_decl
);
9326 profile_count num
= node
->count
;
9327 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
9328 bool scale
= num
.initialized_p () && !(num
== den
);
9332 profile_count::adjust_for_ipa_scaling (&num
, &den
);
9333 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= node
->count
;
9334 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
9335 = EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
.apply_scale (num
, den
);
9338 FOR_EACH_BB_FN (bb
, cfun
)
9341 bb
->count
= bb
->count
.apply_scale (num
, den
);
9342 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
9344 gimple
*stmt
= gsi_stmt (gsi
);
9345 tree decl
= is_gimple_call (stmt
)
9346 ? gimple_call_fndecl (stmt
)
9350 int flags
= gimple_call_flags (stmt
);
9351 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
9353 if (gimple_purge_dead_abnormal_call_edges (bb
))
9354 todo
|= TODO_cleanup_cfg
;
9356 if (gimple_in_ssa_p (cfun
))
9358 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9363 if (flags
& ECF_NORETURN
9364 && fixup_noreturn_call (stmt
))
9365 todo
|= TODO_cleanup_cfg
;
9368 /* Remove stores to variables we marked write-only.
9369 Keep access when store has side effect, i.e. in case when source
9371 if (gimple_store_p (stmt
)
9372 && !gimple_has_side_effects (stmt
))
9374 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
9377 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
9378 && varpool_node::get (lhs
)->writeonly
)
9380 unlink_stmt_vdef (stmt
);
9381 gsi_remove (&gsi
, true);
9382 release_defs (stmt
);
9383 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9387 /* For calls we can simply remove LHS when it is known
9388 to be write-only. */
9389 if (is_gimple_call (stmt
)
9390 && gimple_get_lhs (stmt
))
9392 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
9395 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
9396 && varpool_node::get (lhs
)->writeonly
)
9398 gimple_call_set_lhs (stmt
, NULL
);
9400 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9404 if (maybe_clean_eh_stmt (stmt
)
9405 && gimple_purge_dead_eh_edges (bb
))
9406 todo
|= TODO_cleanup_cfg
;
9410 /* If we have a basic block with no successors that does not
9411 end with a control statement or a noreturn call end it with
9412 a call to __builtin_unreachable. This situation can occur
9413 when inlining a noreturn call that does in fact return. */
9414 if (EDGE_COUNT (bb
->succs
) == 0)
9416 gimple
*stmt
= last_stmt (bb
);
9418 || (!is_ctrl_stmt (stmt
)
9419 && (!is_gimple_call (stmt
)
9420 || !gimple_call_noreturn_p (stmt
))))
9422 if (stmt
&& is_gimple_call (stmt
))
9423 gimple_call_set_ctrl_altering (stmt
, false);
9424 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
9425 stmt
= gimple_build_call (fndecl
, 0);
9426 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
9427 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
9428 if (!cfun
->after_inlining
)
9430 gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
);
9431 node
->create_edge (cgraph_node::get_create (fndecl
),
9432 call_stmt
, bb
->count
);
9438 compute_function_frequency ();
9441 && (todo
& TODO_cleanup_cfg
))
9442 loops_state_set (LOOPS_NEED_FIXUP
);
9449 const pass_data pass_data_fixup_cfg
=
9451 GIMPLE_PASS
, /* type */
9452 "fixup_cfg", /* name */
9453 OPTGROUP_NONE
, /* optinfo_flags */
9454 TV_NONE
, /* tv_id */
9455 PROP_cfg
, /* properties_required */
9456 0, /* properties_provided */
9457 0, /* properties_destroyed */
9458 0, /* todo_flags_start */
9459 0, /* todo_flags_finish */
9462 class pass_fixup_cfg
: public gimple_opt_pass
9465 pass_fixup_cfg (gcc::context
*ctxt
)
9466 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
9469 /* opt_pass methods: */
9470 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
9471 virtual unsigned int execute (function
*) { return execute_fixup_cfg (); }
9473 }; // class pass_fixup_cfg
9478 make_pass_fixup_cfg (gcc::context
*ctxt
)
9480 return new pass_fixup_cfg (ctxt
);
9483 /* Garbage collection support for edge_def. */
9485 extern void gt_ggc_mx (tree
&);
9486 extern void gt_ggc_mx (gimple
*&);
9487 extern void gt_ggc_mx (rtx
&);
9488 extern void gt_ggc_mx (basic_block
&);
9491 gt_ggc_mx (rtx_insn
*& x
)
9494 gt_ggc_mx_rtx_def ((void *) x
);
9498 gt_ggc_mx (edge_def
*e
)
9500 tree block
= LOCATION_BLOCK (e
->goto_locus
);
9502 gt_ggc_mx (e
->dest
);
9503 if (current_ir_type () == IR_GIMPLE
)
9504 gt_ggc_mx (e
->insns
.g
);
9506 gt_ggc_mx (e
->insns
.r
);
9510 /* PCH support for edge_def. */
9512 extern void gt_pch_nx (tree
&);
9513 extern void gt_pch_nx (gimple
*&);
9514 extern void gt_pch_nx (rtx
&);
9515 extern void gt_pch_nx (basic_block
&);
9518 gt_pch_nx (rtx_insn
*& x
)
9521 gt_pch_nx_rtx_def ((void *) x
);
9525 gt_pch_nx (edge_def
*e
)
9527 tree block
= LOCATION_BLOCK (e
->goto_locus
);
9529 gt_pch_nx (e
->dest
);
9530 if (current_ir_type () == IR_GIMPLE
)
9531 gt_pch_nx (e
->insns
.g
);
9533 gt_pch_nx (e
->insns
.r
);
9538 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
9540 tree block
= LOCATION_BLOCK (e
->goto_locus
);
9541 op (&(e
->src
), cookie
);
9542 op (&(e
->dest
), cookie
);
9543 if (current_ir_type () == IR_GIMPLE
)
9544 op (&(e
->insns
.g
), cookie
);
9546 op (&(e
->insns
.r
), cookie
);
9547 op (&(block
), cookie
);
9552 namespace selftest
{
9554 /* Helper function for CFG selftests: create a dummy function decl
9555 and push it as cfun. */
9558 push_fndecl (const char *name
)
9560 tree fn_type
= build_function_type_array (integer_type_node
, 0, NULL
);
9561 /* FIXME: this uses input_location: */
9562 tree fndecl
= build_fn_decl (name
, fn_type
);
9563 tree retval
= build_decl (UNKNOWN_LOCATION
, RESULT_DECL
,
9564 NULL_TREE
, integer_type_node
);
9565 DECL_RESULT (fndecl
) = retval
;
9566 push_struct_function (fndecl
);
9567 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9568 ASSERT_TRUE (fun
!= NULL
);
9569 init_empty_tree_cfg_for_function (fun
);
9570 ASSERT_EQ (2, n_basic_blocks_for_fn (fun
));
9571 ASSERT_EQ (0, n_edges_for_fn (fun
));
9575 /* These tests directly create CFGs.
9576 Compare with the static fns within tree-cfg.c:
9578 - make_blocks: calls create_basic_block (seq, bb);
9581 /* Verify a simple cfg of the form:
9582 ENTRY -> A -> B -> C -> EXIT. */
9585 test_linear_chain ()
9587 gimple_register_cfg_hooks ();
9589 tree fndecl
= push_fndecl ("cfg_test_linear_chain");
9590 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9592 /* Create some empty blocks. */
9593 basic_block bb_a
= create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
9594 basic_block bb_b
= create_empty_bb (bb_a
);
9595 basic_block bb_c
= create_empty_bb (bb_b
);
9597 ASSERT_EQ (5, n_basic_blocks_for_fn (fun
));
9598 ASSERT_EQ (0, n_edges_for_fn (fun
));
9600 /* Create some edges: a simple linear chain of BBs. */
9601 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), bb_a
, EDGE_FALLTHRU
);
9602 make_edge (bb_a
, bb_b
, 0);
9603 make_edge (bb_b
, bb_c
, 0);
9604 make_edge (bb_c
, EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
9606 /* Verify the edges. */
9607 ASSERT_EQ (4, n_edges_for_fn (fun
));
9608 ASSERT_EQ (NULL
, ENTRY_BLOCK_PTR_FOR_FN (fun
)->preds
);
9609 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun
)->succs
->length ());
9610 ASSERT_EQ (1, bb_a
->preds
->length ());
9611 ASSERT_EQ (1, bb_a
->succs
->length ());
9612 ASSERT_EQ (1, bb_b
->preds
->length ());
9613 ASSERT_EQ (1, bb_b
->succs
->length ());
9614 ASSERT_EQ (1, bb_c
->preds
->length ());
9615 ASSERT_EQ (1, bb_c
->succs
->length ());
9616 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
->length ());
9617 ASSERT_EQ (NULL
, EXIT_BLOCK_PTR_FOR_FN (fun
)->succs
);
9619 /* Verify the dominance information
9620 Each BB in our simple chain should be dominated by the one before
9622 calculate_dominance_info (CDI_DOMINATORS
);
9623 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_b
));
9624 ASSERT_EQ (bb_b
, get_immediate_dominator (CDI_DOMINATORS
, bb_c
));
9625 vec
<basic_block
> dom_by_b
= get_dominated_by (CDI_DOMINATORS
, bb_b
);
9626 ASSERT_EQ (1, dom_by_b
.length ());
9627 ASSERT_EQ (bb_c
, dom_by_b
[0]);
9628 free_dominance_info (CDI_DOMINATORS
);
9629 dom_by_b
.release ();
9631 /* Similarly for post-dominance: each BB in our chain is post-dominated
9632 by the one after it. */
9633 calculate_dominance_info (CDI_POST_DOMINATORS
);
9634 ASSERT_EQ (bb_b
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_a
));
9635 ASSERT_EQ (bb_c
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_b
));
9636 vec
<basic_block
> postdom_by_b
= get_dominated_by (CDI_POST_DOMINATORS
, bb_b
);
9637 ASSERT_EQ (1, postdom_by_b
.length ());
9638 ASSERT_EQ (bb_a
, postdom_by_b
[0]);
9639 free_dominance_info (CDI_POST_DOMINATORS
);
9640 postdom_by_b
.release ();
9645 /* Verify a simple CFG of the form:
9661 gimple_register_cfg_hooks ();
9663 tree fndecl
= push_fndecl ("cfg_test_diamond");
9664 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9666 /* Create some empty blocks. */
9667 basic_block bb_a
= create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
9668 basic_block bb_b
= create_empty_bb (bb_a
);
9669 basic_block bb_c
= create_empty_bb (bb_a
);
9670 basic_block bb_d
= create_empty_bb (bb_b
);
9672 ASSERT_EQ (6, n_basic_blocks_for_fn (fun
));
9673 ASSERT_EQ (0, n_edges_for_fn (fun
));
9675 /* Create the edges. */
9676 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), bb_a
, EDGE_FALLTHRU
);
9677 make_edge (bb_a
, bb_b
, EDGE_TRUE_VALUE
);
9678 make_edge (bb_a
, bb_c
, EDGE_FALSE_VALUE
);
9679 make_edge (bb_b
, bb_d
, 0);
9680 make_edge (bb_c
, bb_d
, 0);
9681 make_edge (bb_d
, EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
9683 /* Verify the edges. */
9684 ASSERT_EQ (6, n_edges_for_fn (fun
));
9685 ASSERT_EQ (1, bb_a
->preds
->length ());
9686 ASSERT_EQ (2, bb_a
->succs
->length ());
9687 ASSERT_EQ (1, bb_b
->preds
->length ());
9688 ASSERT_EQ (1, bb_b
->succs
->length ());
9689 ASSERT_EQ (1, bb_c
->preds
->length ());
9690 ASSERT_EQ (1, bb_c
->succs
->length ());
9691 ASSERT_EQ (2, bb_d
->preds
->length ());
9692 ASSERT_EQ (1, bb_d
->succs
->length ());
9694 /* Verify the dominance information. */
9695 calculate_dominance_info (CDI_DOMINATORS
);
9696 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_b
));
9697 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_c
));
9698 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_d
));
9699 vec
<basic_block
> dom_by_a
= get_dominated_by (CDI_DOMINATORS
, bb_a
);
9700 ASSERT_EQ (3, dom_by_a
.length ()); /* B, C, D, in some order. */
9701 dom_by_a
.release ();
9702 vec
<basic_block
> dom_by_b
= get_dominated_by (CDI_DOMINATORS
, bb_b
);
9703 ASSERT_EQ (0, dom_by_b
.length ());
9704 dom_by_b
.release ();
9705 free_dominance_info (CDI_DOMINATORS
);
9707 /* Similarly for post-dominance. */
9708 calculate_dominance_info (CDI_POST_DOMINATORS
);
9709 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_a
));
9710 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_b
));
9711 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_c
));
9712 vec
<basic_block
> postdom_by_d
= get_dominated_by (CDI_POST_DOMINATORS
, bb_d
);
9713 ASSERT_EQ (3, postdom_by_d
.length ()); /* A, B, C in some order. */
9714 postdom_by_d
.release ();
9715 vec
<basic_block
> postdom_by_b
= get_dominated_by (CDI_POST_DOMINATORS
, bb_b
);
9716 ASSERT_EQ (0, postdom_by_b
.length ());
9717 postdom_by_b
.release ();
9718 free_dominance_info (CDI_POST_DOMINATORS
);
9723 /* Verify that we can handle a CFG containing a "complete" aka
9724 fully-connected subgraph (where A B C D below all have edges
9725 pointing to each other node, also to themselves).
9743 test_fully_connected ()
9745 gimple_register_cfg_hooks ();
9747 tree fndecl
= push_fndecl ("cfg_fully_connected");
9748 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9752 /* Create some empty blocks. */
9753 auto_vec
<basic_block
> subgraph_nodes
;
9754 for (int i
= 0; i
< n
; i
++)
9755 subgraph_nodes
.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
)));
9757 ASSERT_EQ (n
+ 2, n_basic_blocks_for_fn (fun
));
9758 ASSERT_EQ (0, n_edges_for_fn (fun
));
9760 /* Create the edges. */
9761 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), subgraph_nodes
[0], EDGE_FALLTHRU
);
9762 make_edge (subgraph_nodes
[0], EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
9763 for (int i
= 0; i
< n
; i
++)
9764 for (int j
= 0; j
< n
; j
++)
9765 make_edge (subgraph_nodes
[i
], subgraph_nodes
[j
], 0);
9767 /* Verify the edges. */
9768 ASSERT_EQ (2 + (n
* n
), n_edges_for_fn (fun
));
9769 /* The first one is linked to ENTRY/EXIT as well as itself and
9771 ASSERT_EQ (n
+ 1, subgraph_nodes
[0]->preds
->length ());
9772 ASSERT_EQ (n
+ 1, subgraph_nodes
[0]->succs
->length ());
9773 /* The other ones in the subgraph are linked to everything in
9774 the subgraph (including themselves). */
9775 for (int i
= 1; i
< n
; i
++)
9777 ASSERT_EQ (n
, subgraph_nodes
[i
]->preds
->length ());
9778 ASSERT_EQ (n
, subgraph_nodes
[i
]->succs
->length ());
9781 /* Verify the dominance information. */
9782 calculate_dominance_info (CDI_DOMINATORS
);
9783 /* The initial block in the subgraph should be dominated by ENTRY. */
9784 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun
),
9785 get_immediate_dominator (CDI_DOMINATORS
,
9786 subgraph_nodes
[0]));
9787 /* Every other block in the subgraph should be dominated by the
9789 for (int i
= 1; i
< n
; i
++)
9790 ASSERT_EQ (subgraph_nodes
[0],
9791 get_immediate_dominator (CDI_DOMINATORS
,
9792 subgraph_nodes
[i
]));
9793 free_dominance_info (CDI_DOMINATORS
);
9795 /* Similarly for post-dominance. */
9796 calculate_dominance_info (CDI_POST_DOMINATORS
);
9797 /* The initial block in the subgraph should be postdominated by EXIT. */
9798 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun
),
9799 get_immediate_dominator (CDI_POST_DOMINATORS
,
9800 subgraph_nodes
[0]));
9801 /* Every other block in the subgraph should be postdominated by the
9802 initial block, since that leads to EXIT. */
9803 for (int i
= 1; i
< n
; i
++)
9804 ASSERT_EQ (subgraph_nodes
[0],
9805 get_immediate_dominator (CDI_POST_DOMINATORS
,
9806 subgraph_nodes
[i
]));
9807 free_dominance_info (CDI_POST_DOMINATORS
);
9812 /* Run all of the selftests within this file. */
9817 test_linear_chain ();
9819 test_fully_connected ();
9822 } // namespace selftest
9824 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9827 - switch statement (a block with many out-edges)
9828 - something that jumps to itself
9831 #endif /* CHECKING_P */