1 /* Control flow functions for trees.
2 Copyright (C) 2001-2017 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
66 /* This file contains functions for building the Control Flow Graph (CFG)
67 for a function tree. */
69 /* Local declarations. */
71 /* Initial capacity for the basic block array. */
72 static const int initial_cfg_capacity
= 20;
74 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
75 which use a particular edge. The CASE_LABEL_EXPRs are chained together
76 via their CASE_CHAIN field, which we clear after we're done with the
77 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
79 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
80 update the case vector in response to edge redirections.
82 Right now this table is set up and torn down at key points in the
83 compilation process. It would be nice if we could make the table
84 more persistent. The key is getting notification of changes to
85 the CFG (particularly edge removal, creation and redirection). */
87 static hash_map
<edge
, tree
> *edge_to_cases
;
89 /* If we record edge_to_cases, this bitmap will hold indexes
90 of basic blocks that end in a GIMPLE_SWITCH which we touched
91 due to edge manipulations. */
93 static bitmap touched_switch_bbs
;
98 long num_merged_labels
;
101 static struct cfg_stats_d cfg_stats
;
103 /* Data to pass to replace_block_vars_by_duplicates_1. */
104 struct replace_decls_d
106 hash_map
<tree
, tree
> *vars_map
;
110 /* Hash table to store last discriminator assigned for each locus. */
111 struct locus_discrim_map
117 /* Hashtable helpers. */
119 struct locus_discrim_hasher
: free_ptr_hash
<locus_discrim_map
>
121 static inline hashval_t
hash (const locus_discrim_map
*);
122 static inline bool equal (const locus_discrim_map
*,
123 const locus_discrim_map
*);
126 /* Trivial hash function for a location_t. ITEM is a pointer to
127 a hash table entry that maps a location_t to a discriminator. */
130 locus_discrim_hasher::hash (const locus_discrim_map
*item
)
132 return LOCATION_LINE (item
->locus
);
135 /* Equality function for the locus-to-discriminator map. A and B
136 point to the two hash table entries to compare. */
139 locus_discrim_hasher::equal (const locus_discrim_map
*a
,
140 const locus_discrim_map
*b
)
142 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
145 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
147 /* Basic blocks and flowgraphs. */
148 static void make_blocks (gimple_seq
);
151 static void make_edges (void);
152 static void assign_discriminators (void);
153 static void make_cond_expr_edges (basic_block
);
154 static void make_gimple_switch_edges (gswitch
*, basic_block
);
155 static bool make_goto_expr_edges (basic_block
);
156 static void make_gimple_asm_edges (basic_block
);
157 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
158 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
160 /* Various helpers. */
161 static inline bool stmt_starts_bb_p (gimple
*, gimple
*);
162 static int gimple_verify_flow_info (void);
163 static void gimple_make_forwarder_block (edge
);
164 static gimple
*first_non_label_stmt (basic_block
);
165 static bool verify_gimple_transaction (gtransaction
*);
166 static bool call_can_make_abnormal_goto (gimple
*);
168 /* Flowgraph optimization and cleanup. */
169 static void gimple_merge_blocks (basic_block
, basic_block
);
170 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
171 static void remove_bb (basic_block
);
172 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
173 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
174 static edge
find_taken_edge_switch_expr (gswitch
*, basic_block
, tree
);
175 static tree
find_case_label_for_value (gswitch
*, tree
);
176 static void lower_phi_internal_fn ();
179 init_empty_tree_cfg_for_function (struct function
*fn
)
181 /* Initialize the basic block array. */
183 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
184 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
185 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
186 vec_alloc (basic_block_info_for_fn (fn
), initial_cfg_capacity
);
187 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
188 initial_cfg_capacity
);
190 /* Build a mapping of labels to their associated blocks. */
191 vec_alloc (label_to_block_map_for_fn (fn
), initial_cfg_capacity
);
192 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
193 initial_cfg_capacity
);
195 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
196 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
198 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
199 = EXIT_BLOCK_PTR_FOR_FN (fn
);
200 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
201 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
205 init_empty_tree_cfg (void)
207 init_empty_tree_cfg_for_function (cfun
);
210 /*---------------------------------------------------------------------------
212 ---------------------------------------------------------------------------*/
214 /* Entry point to the CFG builder for trees. SEQ is the sequence of
215 statements to be added to the flowgraph. */
218 build_gimple_cfg (gimple_seq seq
)
220 /* Register specific gimple functions. */
221 gimple_register_cfg_hooks ();
223 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
225 init_empty_tree_cfg ();
229 /* Make sure there is always at least one block, even if it's empty. */
230 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
231 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
233 /* Adjust the size of the array. */
234 if (basic_block_info_for_fn (cfun
)->length ()
235 < (size_t) n_basic_blocks_for_fn (cfun
))
236 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
237 n_basic_blocks_for_fn (cfun
));
239 /* To speed up statement iterator walks, we first purge dead labels. */
240 cleanup_dead_labels ();
242 /* Group case nodes to reduce the number of edges.
243 We do this after cleaning up dead labels because otherwise we miss
244 a lot of obvious case merging opportunities. */
245 group_case_labels ();
247 /* Create the edges of the flowgraph. */
248 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
250 assign_discriminators ();
251 lower_phi_internal_fn ();
252 cleanup_dead_labels ();
253 delete discriminator_per_locus
;
254 discriminator_per_locus
= NULL
;
257 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
258 them and propagate the information to LOOP. We assume that the annotations
259 come immediately before the condition in BB, if any. */
262 replace_loop_annotate_in_block (basic_block bb
, struct loop
*loop
)
264 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
265 gimple
*stmt
= gsi_stmt (gsi
);
267 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
270 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
272 stmt
= gsi_stmt (gsi
);
273 if (gimple_code (stmt
) != GIMPLE_CALL
)
275 if (!gimple_call_internal_p (stmt
)
276 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
279 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
281 case annot_expr_ivdep_kind
:
282 loop
->safelen
= INT_MAX
;
284 case annot_expr_unroll_kind
:
286 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt
, 2));
287 cfun
->has_unroll
= true;
289 case annot_expr_no_vector_kind
:
290 loop
->dont_vectorize
= true;
292 case annot_expr_vector_kind
:
293 loop
->force_vectorize
= true;
294 cfun
->has_force_vectorize_loops
= true;
296 case annot_expr_parallel_kind
:
297 loop
->can_be_parallel
= true;
298 loop
->safelen
= INT_MAX
;
304 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
305 gimple_call_arg (stmt
, 0));
306 gsi_replace (&gsi
, stmt
, true);
310 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
311 them and propagate the information to the loop. We assume that the
312 annotations come immediately before the condition of the loop. */
315 replace_loop_annotate (void)
319 gimple_stmt_iterator gsi
;
322 FOR_EACH_LOOP (loop
, 0)
324 /* First look into the header. */
325 replace_loop_annotate_in_block (loop
->header
, loop
);
327 /* Then look into the latch, if any. */
329 replace_loop_annotate_in_block (loop
->latch
, loop
);
332 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
333 FOR_EACH_BB_FN (bb
, cfun
)
335 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
337 stmt
= gsi_stmt (gsi
);
338 if (gimple_code (stmt
) != GIMPLE_CALL
)
340 if (!gimple_call_internal_p (stmt
)
341 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
344 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
346 case annot_expr_ivdep_kind
:
347 case annot_expr_unroll_kind
:
348 case annot_expr_no_vector_kind
:
349 case annot_expr_vector_kind
:
355 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
356 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
357 gimple_call_arg (stmt
, 0));
358 gsi_replace (&gsi
, stmt
, true);
363 /* Lower internal PHI function from GIMPLE FE. */
366 lower_phi_internal_fn ()
368 basic_block bb
, pred
= NULL
;
369 gimple_stmt_iterator gsi
;
374 /* After edge creation, handle __PHI function from GIMPLE FE. */
375 FOR_EACH_BB_FN (bb
, cfun
)
377 for (gsi
= gsi_after_labels (bb
); !gsi_end_p (gsi
);)
379 stmt
= gsi_stmt (gsi
);
380 if (! gimple_call_internal_p (stmt
, IFN_PHI
))
383 lhs
= gimple_call_lhs (stmt
);
384 phi_node
= create_phi_node (lhs
, bb
);
386 /* Add arguments to the PHI node. */
387 for (unsigned i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
389 tree arg
= gimple_call_arg (stmt
, i
);
390 if (TREE_CODE (arg
) == LABEL_DECL
)
391 pred
= label_to_block (arg
);
394 edge e
= find_edge (pred
, bb
);
395 add_phi_arg (phi_node
, arg
, e
, UNKNOWN_LOCATION
);
399 gsi_remove (&gsi
, true);
405 execute_build_cfg (void)
407 gimple_seq body
= gimple_body (current_function_decl
);
409 build_gimple_cfg (body
);
410 gimple_set_body (current_function_decl
, NULL
);
411 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
413 fprintf (dump_file
, "Scope blocks:\n");
414 dump_scope_blocks (dump_file
, dump_flags
);
417 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
418 replace_loop_annotate ();
424 const pass_data pass_data_build_cfg
=
426 GIMPLE_PASS
, /* type */
428 OPTGROUP_NONE
, /* optinfo_flags */
429 TV_TREE_CFG
, /* tv_id */
430 PROP_gimple_leh
, /* properties_required */
431 ( PROP_cfg
| PROP_loops
), /* properties_provided */
432 0, /* properties_destroyed */
433 0, /* todo_flags_start */
434 0, /* todo_flags_finish */
437 class pass_build_cfg
: public gimple_opt_pass
440 pass_build_cfg (gcc::context
*ctxt
)
441 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
444 /* opt_pass methods: */
445 virtual unsigned int execute (function
*) { return execute_build_cfg (); }
447 }; // class pass_build_cfg
452 make_pass_build_cfg (gcc::context
*ctxt
)
454 return new pass_build_cfg (ctxt
);
458 /* Return true if T is a computed goto. */
461 computed_goto_p (gimple
*t
)
463 return (gimple_code (t
) == GIMPLE_GOTO
464 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
467 /* Returns true if the sequence of statements STMTS only contains
468 a call to __builtin_unreachable (). */
471 gimple_seq_unreachable_p (gimple_seq stmts
)
474 /* Return false if -fsanitize=unreachable, we don't want to
475 optimize away those calls, but rather turn them into
476 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
478 || sanitize_flags_p (SANITIZE_UNREACHABLE
))
481 gimple_stmt_iterator gsi
= gsi_last (stmts
);
483 if (!gimple_call_builtin_p (gsi_stmt (gsi
), BUILT_IN_UNREACHABLE
))
486 for (gsi_prev (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
488 gimple
*stmt
= gsi_stmt (gsi
);
489 if (gimple_code (stmt
) != GIMPLE_LABEL
490 && !is_gimple_debug (stmt
)
491 && !gimple_clobber_p (stmt
))
497 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
498 the other edge points to a bb with just __builtin_unreachable ().
499 I.e. return true for C->M edge in:
507 __builtin_unreachable ();
511 assert_unreachable_fallthru_edge_p (edge e
)
513 basic_block pred_bb
= e
->src
;
514 gimple
*last
= last_stmt (pred_bb
);
515 if (last
&& gimple_code (last
) == GIMPLE_COND
)
517 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
518 if (other_bb
== e
->dest
)
519 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
520 if (EDGE_COUNT (other_bb
->succs
) == 0)
521 return gimple_seq_unreachable_p (bb_seq (other_bb
));
527 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
528 could alter control flow except via eh. We initialize the flag at
529 CFG build time and only ever clear it later. */
532 gimple_call_initialize_ctrl_altering (gimple
*stmt
)
534 int flags
= gimple_call_flags (stmt
);
536 /* A call alters control flow if it can make an abnormal goto. */
537 if (call_can_make_abnormal_goto (stmt
)
538 /* A call also alters control flow if it does not return. */
539 || flags
& ECF_NORETURN
540 /* TM ending statements have backedges out of the transaction.
541 Return true so we split the basic block containing them.
542 Note that the TM_BUILTIN test is merely an optimization. */
543 || ((flags
& ECF_TM_BUILTIN
)
544 && is_tm_ending_fndecl (gimple_call_fndecl (stmt
)))
545 /* BUILT_IN_RETURN call is same as return statement. */
546 || gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
)
547 /* IFN_UNIQUE should be the last insn, to make checking for it
548 as cheap as possible. */
549 || (gimple_call_internal_p (stmt
)
550 && gimple_call_internal_unique_p (stmt
)))
551 gimple_call_set_ctrl_altering (stmt
, true);
553 gimple_call_set_ctrl_altering (stmt
, false);
557 /* Insert SEQ after BB and build a flowgraph. */
560 make_blocks_1 (gimple_seq seq
, basic_block bb
)
562 gimple_stmt_iterator i
= gsi_start (seq
);
564 bool start_new_block
= true;
565 bool first_stmt_of_seq
= true;
567 while (!gsi_end_p (i
))
574 if (stmt
&& is_gimple_call (stmt
))
575 gimple_call_initialize_ctrl_altering (stmt
);
577 /* If the statement starts a new basic block or if we have determined
578 in a previous pass that we need to create a new block for STMT, do
580 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
582 if (!first_stmt_of_seq
)
583 gsi_split_seq_before (&i
, &seq
);
584 bb
= create_basic_block (seq
, bb
);
585 start_new_block
= false;
588 /* Now add STMT to BB and create the subgraphs for special statement
590 gimple_set_bb (stmt
, bb
);
592 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
594 if (stmt_ends_bb_p (stmt
))
596 /* If the stmt can make abnormal goto use a new temporary
597 for the assignment to the LHS. This makes sure the old value
598 of the LHS is available on the abnormal edge. Otherwise
599 we will end up with overlapping life-ranges for abnormal
601 if (gimple_has_lhs (stmt
)
602 && stmt_can_make_abnormal_goto (stmt
)
603 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
605 tree lhs
= gimple_get_lhs (stmt
);
606 tree tmp
= create_tmp_var (TREE_TYPE (lhs
));
607 gimple
*s
= gimple_build_assign (lhs
, tmp
);
608 gimple_set_location (s
, gimple_location (stmt
));
609 gimple_set_block (s
, gimple_block (stmt
));
610 gimple_set_lhs (stmt
, tmp
);
611 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
612 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
613 DECL_GIMPLE_REG_P (tmp
) = 1;
614 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
616 start_new_block
= true;
620 first_stmt_of_seq
= false;
625 /* Build a flowgraph for the sequence of stmts SEQ. */
628 make_blocks (gimple_seq seq
)
630 make_blocks_1 (seq
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
633 /* Create and return a new empty basic block after bb AFTER. */
636 create_bb (void *h
, void *e
, basic_block after
)
642 /* Create and initialize a new basic block. Since alloc_block uses
643 GC allocation that clears memory to allocate a basic block, we do
644 not have to clear the newly allocated basic block here. */
647 bb
->index
= last_basic_block_for_fn (cfun
);
649 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
651 /* Add the new block to the linked list of blocks. */
652 link_block (bb
, after
);
654 /* Grow the basic block array if needed. */
655 if ((size_t) last_basic_block_for_fn (cfun
)
656 == basic_block_info_for_fn (cfun
)->length ())
659 (last_basic_block_for_fn (cfun
)
660 + (last_basic_block_for_fn (cfun
) + 3) / 4);
661 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
664 /* Add the newly created block to the array. */
665 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
667 n_basic_blocks_for_fn (cfun
)++;
668 last_basic_block_for_fn (cfun
)++;
674 /*---------------------------------------------------------------------------
676 ---------------------------------------------------------------------------*/
678 /* If basic block BB has an abnormal edge to a basic block
679 containing IFN_ABNORMAL_DISPATCHER internal call, return
680 that the dispatcher's basic block, otherwise return NULL. */
683 get_abnormal_succ_dispatcher (basic_block bb
)
688 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
689 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
691 gimple_stmt_iterator gsi
692 = gsi_start_nondebug_after_labels_bb (e
->dest
);
693 gimple
*g
= gsi_stmt (gsi
);
694 if (g
&& gimple_call_internal_p (g
, IFN_ABNORMAL_DISPATCHER
))
700 /* Helper function for make_edges. Create a basic block with
701 with ABNORMAL_DISPATCHER internal call in it if needed, and
702 create abnormal edges from BBS to it and from it to FOR_BB
703 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
706 handle_abnormal_edges (basic_block
*dispatcher_bbs
,
707 basic_block for_bb
, int *bb_to_omp_idx
,
708 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
710 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
711 unsigned int idx
= 0;
717 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
718 if (bb_to_omp_idx
[for_bb
->index
] != 0)
722 /* If the dispatcher has been created already, then there are basic
723 blocks with abnormal edges to it, so just make a new edge to
725 if (*dispatcher
== NULL
)
727 /* Check if there are any basic blocks that need to have
728 abnormal edges to this dispatcher. If there are none, return
730 if (bb_to_omp_idx
== NULL
)
732 if (bbs
->is_empty ())
737 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
738 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
744 /* Create the dispatcher bb. */
745 *dispatcher
= create_basic_block (NULL
, for_bb
);
748 /* Factor computed gotos into a common computed goto site. Also
749 record the location of that site so that we can un-factor the
750 gotos after we have converted back to normal form. */
751 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
753 /* Create the destination of the factored goto. Each original
754 computed goto will put its desired destination into this
755 variable and jump to the label we create immediately below. */
756 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
758 /* Build a label for the new block which will contain the
759 factored computed goto. */
760 tree factored_label_decl
761 = create_artificial_label (UNKNOWN_LOCATION
);
762 gimple
*factored_computed_goto_label
763 = gimple_build_label (factored_label_decl
);
764 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
766 /* Build our new computed goto. */
767 gimple
*factored_computed_goto
= gimple_build_goto (var
);
768 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
770 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
773 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
776 gsi
= gsi_last_bb (bb
);
777 gimple
*last
= gsi_stmt (gsi
);
779 gcc_assert (computed_goto_p (last
));
781 /* Copy the original computed goto's destination into VAR. */
783 = gimple_build_assign (var
, gimple_goto_dest (last
));
784 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
786 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
787 e
->goto_locus
= gimple_location (last
);
788 gsi_remove (&gsi
, true);
793 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
794 gimple
*g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
796 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
797 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
799 /* Create predecessor edges of the dispatcher. */
800 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
803 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
805 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
810 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
813 /* Creates outgoing edges for BB. Returns 1 when it ends with an
814 computed goto, returns 2 when it ends with a statement that
815 might return to this function via an nonlocal goto, otherwise
816 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
819 make_edges_bb (basic_block bb
, struct omp_region
**pcur_region
, int *pomp_index
)
821 gimple
*last
= last_stmt (bb
);
822 bool fallthru
= false;
828 switch (gimple_code (last
))
831 if (make_goto_expr_edges (bb
))
837 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
838 e
->goto_locus
= gimple_location (last
);
843 make_cond_expr_edges (bb
);
847 make_gimple_switch_edges (as_a
<gswitch
*> (last
), bb
);
851 make_eh_edges (last
);
854 case GIMPLE_EH_DISPATCH
:
855 fallthru
= make_eh_dispatch_edges (as_a
<geh_dispatch
*> (last
));
859 /* If this function receives a nonlocal goto, then we need to
860 make edges from this call site to all the nonlocal goto
862 if (stmt_can_make_abnormal_goto (last
))
865 /* If this statement has reachable exception handlers, then
866 create abnormal edges to them. */
867 make_eh_edges (last
);
869 /* BUILTIN_RETURN is really a return statement. */
870 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
872 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
875 /* Some calls are known not to return. */
877 fallthru
= !gimple_call_noreturn_p (last
);
881 /* A GIMPLE_ASSIGN may throw internally and thus be considered
883 if (is_ctrl_altering_stmt (last
))
884 make_eh_edges (last
);
889 make_gimple_asm_edges (bb
);
894 fallthru
= omp_make_gimple_edges (bb
, pcur_region
, pomp_index
);
897 case GIMPLE_TRANSACTION
:
899 gtransaction
*txn
= as_a
<gtransaction
*> (last
);
900 tree label1
= gimple_transaction_label_norm (txn
);
901 tree label2
= gimple_transaction_label_uninst (txn
);
904 make_edge (bb
, label_to_block (label1
), EDGE_FALLTHRU
);
906 make_edge (bb
, label_to_block (label2
),
907 EDGE_TM_UNINSTRUMENTED
| (label1
? 0 : EDGE_FALLTHRU
));
909 tree label3
= gimple_transaction_label_over (txn
);
910 if (gimple_transaction_subcode (txn
)
911 & (GTMA_HAVE_ABORT
| GTMA_IS_OUTER
))
912 make_edge (bb
, label_to_block (label3
), EDGE_TM_ABORT
);
919 gcc_assert (!stmt_ends_bb_p (last
));
925 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
930 /* Join all the blocks in the flowgraph. */
936 struct omp_region
*cur_region
= NULL
;
937 auto_vec
<basic_block
> ab_edge_goto
;
938 auto_vec
<basic_block
> ab_edge_call
;
939 int *bb_to_omp_idx
= NULL
;
940 int cur_omp_region_idx
= 0;
942 /* Create an edge from entry to the first block with executable
944 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
945 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
948 /* Traverse the basic block array placing edges. */
949 FOR_EACH_BB_FN (bb
, cfun
)
954 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
956 mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
958 ab_edge_goto
.safe_push (bb
);
960 ab_edge_call
.safe_push (bb
);
962 if (cur_region
&& bb_to_omp_idx
== NULL
)
963 bb_to_omp_idx
= XCNEWVEC (int, n_basic_blocks_for_fn (cfun
));
966 /* Computed gotos are hell to deal with, especially if there are
967 lots of them with a large number of destinations. So we factor
968 them to a common computed goto location before we build the
969 edge list. After we convert back to normal form, we will un-factor
970 the computed gotos since factoring introduces an unwanted jump.
971 For non-local gotos and abnormal edges from calls to calls that return
972 twice or forced labels, factor the abnormal edges too, by having all
973 abnormal edges from the calls go to a common artificial basic block
974 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
975 basic block to all forced labels and calls returning twice.
976 We do this per-OpenMP structured block, because those regions
977 are guaranteed to be single entry single exit by the standard,
978 so it is not allowed to enter or exit such regions abnormally this way,
979 thus all computed gotos, non-local gotos and setjmp/longjmp calls
980 must not transfer control across SESE region boundaries. */
981 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
983 gimple_stmt_iterator gsi
;
984 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
985 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
986 int count
= n_basic_blocks_for_fn (cfun
);
989 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
991 FOR_EACH_BB_FN (bb
, cfun
)
993 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
995 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1001 target
= gimple_label_label (label_stmt
);
1003 /* Make an edge to every label block that has been marked as a
1004 potential target for a computed goto or a non-local goto. */
1005 if (FORCED_LABEL (target
))
1006 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1007 &ab_edge_goto
, true);
1008 if (DECL_NONLOCAL (target
))
1010 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1011 &ab_edge_call
, false);
1016 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
1017 gsi_next_nondebug (&gsi
);
1018 if (!gsi_end_p (gsi
))
1020 /* Make an edge to every setjmp-like call. */
1021 gimple
*call_stmt
= gsi_stmt (gsi
);
1022 if (is_gimple_call (call_stmt
)
1023 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
1024 || gimple_call_builtin_p (call_stmt
,
1025 BUILT_IN_SETJMP_RECEIVER
)))
1026 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1027 &ab_edge_call
, false);
1032 XDELETE (dispatcher_bbs
);
1035 XDELETE (bb_to_omp_idx
);
1037 omp_free_regions ();
1040 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1041 needed. Returns true if new bbs were created.
1042 Note: This is transitional code, and should not be used for new code. We
1043 should be able to get rid of this by rewriting all target va-arg
1044 gimplification hooks to use an interface gimple_build_cond_value as described
1045 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1048 gimple_find_sub_bbs (gimple_seq seq
, gimple_stmt_iterator
*gsi
)
1050 gimple
*stmt
= gsi_stmt (*gsi
);
1051 basic_block bb
= gimple_bb (stmt
);
1052 basic_block lastbb
, afterbb
;
1053 int old_num_bbs
= n_basic_blocks_for_fn (cfun
);
1055 lastbb
= make_blocks_1 (seq
, bb
);
1056 if (old_num_bbs
== n_basic_blocks_for_fn (cfun
))
1058 e
= split_block (bb
, stmt
);
1059 /* Move e->dest to come after the new basic blocks. */
1061 unlink_block (afterbb
);
1062 link_block (afterbb
, lastbb
);
1063 redirect_edge_succ (e
, bb
->next_bb
);
1065 while (bb
!= afterbb
)
1067 struct omp_region
*cur_region
= NULL
;
1068 profile_count cnt
= profile_count::zero ();
1071 int cur_omp_region_idx
= 0;
1072 int mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
1073 gcc_assert (!mer
&& !cur_region
);
1074 add_bb_to_loop (bb
, afterbb
->loop_father
);
1078 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1080 if (e
->count ().initialized_p ())
1085 tree_guess_outgoing_edge_probabilities (bb
);
1086 if (all
|| profile_status_for_fn (cfun
) == PROFILE_READ
)
1094 /* Find the next available discriminator value for LOCUS. The
1095 discriminator distinguishes among several basic blocks that
1096 share a common locus, allowing for more accurate sample-based
1100 next_discriminator_for_locus (location_t locus
)
1102 struct locus_discrim_map item
;
1103 struct locus_discrim_map
**slot
;
1106 item
.discriminator
= 0;
1107 slot
= discriminator_per_locus
->find_slot_with_hash (
1108 &item
, LOCATION_LINE (locus
), INSERT
);
1110 if (*slot
== HTAB_EMPTY_ENTRY
)
1112 *slot
= XNEW (struct locus_discrim_map
);
1114 (*slot
)->locus
= locus
;
1115 (*slot
)->discriminator
= 0;
1117 (*slot
)->discriminator
++;
1118 return (*slot
)->discriminator
;
1121 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1124 same_line_p (location_t locus1
, location_t locus2
)
1126 expanded_location from
, to
;
1128 if (locus1
== locus2
)
1131 from
= expand_location (locus1
);
1132 to
= expand_location (locus2
);
1134 if (from
.line
!= to
.line
)
1136 if (from
.file
== to
.file
)
1138 return (from
.file
!= NULL
1140 && filename_cmp (from
.file
, to
.file
) == 0);
1143 /* Assign discriminators to each basic block. */
1146 assign_discriminators (void)
1150 FOR_EACH_BB_FN (bb
, cfun
)
1154 gimple
*last
= last_stmt (bb
);
1155 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
1157 if (locus
== UNKNOWN_LOCATION
)
1160 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1162 gimple
*first
= first_non_label_stmt (e
->dest
);
1163 gimple
*last
= last_stmt (e
->dest
);
1164 if ((first
&& same_line_p (locus
, gimple_location (first
)))
1165 || (last
&& same_line_p (locus
, gimple_location (last
))))
1167 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
1168 bb
->discriminator
= next_discriminator_for_locus (locus
);
1170 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
1176 /* Create the edges for a GIMPLE_COND starting at block BB. */
1179 make_cond_expr_edges (basic_block bb
)
1181 gcond
*entry
= as_a
<gcond
*> (last_stmt (bb
));
1182 gimple
*then_stmt
, *else_stmt
;
1183 basic_block then_bb
, else_bb
;
1184 tree then_label
, else_label
;
1188 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
1190 /* Entry basic blocks for each component. */
1191 then_label
= gimple_cond_true_label (entry
);
1192 else_label
= gimple_cond_false_label (entry
);
1193 then_bb
= label_to_block (then_label
);
1194 else_bb
= label_to_block (else_label
);
1195 then_stmt
= first_stmt (then_bb
);
1196 else_stmt
= first_stmt (else_bb
);
1198 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1199 e
->goto_locus
= gimple_location (then_stmt
);
1200 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1202 e
->goto_locus
= gimple_location (else_stmt
);
1204 /* We do not need the labels anymore. */
1205 gimple_cond_set_true_label (entry
, NULL_TREE
);
1206 gimple_cond_set_false_label (entry
, NULL_TREE
);
1210 /* Called for each element in the hash table (P) as we delete the
1211 edge to cases hash table.
1213 Clear all the CASE_CHAINs to prevent problems with copying of
1214 SWITCH_EXPRs and structure sharing rules, then free the hash table
1218 edge_to_cases_cleanup (edge
const &, tree
const &value
, void *)
1222 for (t
= value
; t
; t
= next
)
1224 next
= CASE_CHAIN (t
);
1225 CASE_CHAIN (t
) = NULL
;
1231 /* Start recording information mapping edges to case labels. */
1234 start_recording_case_labels (void)
1236 gcc_assert (edge_to_cases
== NULL
);
1237 edge_to_cases
= new hash_map
<edge
, tree
>;
1238 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
1241 /* Return nonzero if we are recording information for case labels. */
1244 recording_case_labels_p (void)
1246 return (edge_to_cases
!= NULL
);
1249 /* Stop recording information mapping edges to case labels and
1250 remove any information we have recorded. */
1252 end_recording_case_labels (void)
1256 edge_to_cases
->traverse
<void *, edge_to_cases_cleanup
> (NULL
);
1257 delete edge_to_cases
;
1258 edge_to_cases
= NULL
;
1259 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1261 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
1264 gimple
*stmt
= last_stmt (bb
);
1265 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1266 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1269 BITMAP_FREE (touched_switch_bbs
);
1272 /* If we are inside a {start,end}_recording_cases block, then return
1273 a chain of CASE_LABEL_EXPRs from T which reference E.
1275 Otherwise return NULL. */
1278 get_cases_for_edge (edge e
, gswitch
*t
)
1283 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1284 chains available. Return NULL so the caller can detect this case. */
1285 if (!recording_case_labels_p ())
1288 slot
= edge_to_cases
->get (e
);
1292 /* If we did not find E in the hash table, then this must be the first
1293 time we have been queried for information about E & T. Add all the
1294 elements from T to the hash table then perform the query again. */
1296 n
= gimple_switch_num_labels (t
);
1297 for (i
= 0; i
< n
; i
++)
1299 tree elt
= gimple_switch_label (t
, i
);
1300 tree lab
= CASE_LABEL (elt
);
1301 basic_block label_bb
= label_to_block (lab
);
1302 edge this_edge
= find_edge (e
->src
, label_bb
);
1304 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1306 tree
&s
= edge_to_cases
->get_or_insert (this_edge
);
1307 CASE_CHAIN (elt
) = s
;
1311 return *edge_to_cases
->get (e
);
1314 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1317 make_gimple_switch_edges (gswitch
*entry
, basic_block bb
)
1321 n
= gimple_switch_num_labels (entry
);
1323 for (i
= 0; i
< n
; ++i
)
1325 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1326 basic_block label_bb
= label_to_block (lab
);
1327 make_edge (bb
, label_bb
, 0);
1332 /* Return the basic block holding label DEST. */
1335 label_to_block_fn (struct function
*ifun
, tree dest
)
1337 int uid
= LABEL_DECL_UID (dest
);
1339 /* We would die hard when faced by an undefined label. Emit a label to
1340 the very first basic block. This will hopefully make even the dataflow
1341 and undefined variable warnings quite right. */
1342 if (seen_error () && uid
< 0)
1344 gimple_stmt_iterator gsi
=
1345 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1348 stmt
= gimple_build_label (dest
);
1349 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1350 uid
= LABEL_DECL_UID (dest
);
1352 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1354 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1357 /* Create edges for a goto statement at block BB. Returns true
1358 if abnormal edges should be created. */
1361 make_goto_expr_edges (basic_block bb
)
1363 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1364 gimple
*goto_t
= gsi_stmt (last
);
1366 /* A simple GOTO creates normal edges. */
1367 if (simple_goto_p (goto_t
))
1369 tree dest
= gimple_goto_dest (goto_t
);
1370 basic_block label_bb
= label_to_block (dest
);
1371 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1372 e
->goto_locus
= gimple_location (goto_t
);
1373 gsi_remove (&last
, true);
1377 /* A computed GOTO creates abnormal edges. */
1381 /* Create edges for an asm statement with labels at block BB. */
1384 make_gimple_asm_edges (basic_block bb
)
1386 gasm
*stmt
= as_a
<gasm
*> (last_stmt (bb
));
1387 int i
, n
= gimple_asm_nlabels (stmt
);
1389 for (i
= 0; i
< n
; ++i
)
1391 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1392 basic_block label_bb
= label_to_block (label
);
1393 make_edge (bb
, label_bb
, 0);
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/
1401 /* Cleanup useless labels in basic blocks. This is something we wish
1402 to do early because it allows us to group case labels before creating
1403 the edges for the CFG, and it speeds up block statement iterators in
1404 all passes later on.
1405 We rerun this pass after CFG is created, to get rid of the labels that
1406 are no longer referenced. After then we do not run it any more, since
1407 (almost) no new labels should be created. */
1409 /* A map from basic block index to the leading label of that block. */
1410 static struct label_record
1415 /* True if the label is referenced from somewhere. */
1419 /* Given LABEL return the first label in the same basic block. */
1422 main_block_label (tree label
)
1424 basic_block bb
= label_to_block (label
);
1425 tree main_label
= label_for_bb
[bb
->index
].label
;
1427 /* label_to_block possibly inserted undefined label into the chain. */
1430 label_for_bb
[bb
->index
].label
= label
;
1434 label_for_bb
[bb
->index
].used
= true;
1438 /* Clean up redundant labels within the exception tree. */
1441 cleanup_dead_labels_eh (void)
1448 if (cfun
->eh
== NULL
)
1451 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1452 if (lp
&& lp
->post_landing_pad
)
1454 lab
= main_block_label (lp
->post_landing_pad
);
1455 if (lab
!= lp
->post_landing_pad
)
1457 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1458 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1462 FOR_ALL_EH_REGION (r
)
1466 case ERT_MUST_NOT_THROW
:
1472 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1476 c
->label
= main_block_label (lab
);
1481 case ERT_ALLOWED_EXCEPTIONS
:
1482 lab
= r
->u
.allowed
.label
;
1484 r
->u
.allowed
.label
= main_block_label (lab
);
1490 /* Cleanup redundant labels. This is a three-step process:
1491 1) Find the leading label for each block.
1492 2) Redirect all references to labels to the leading labels.
1493 3) Cleanup all useless labels. */
1496 cleanup_dead_labels (void)
1499 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1501 /* Find a suitable label for each block. We use the first user-defined
1502 label if there is one, or otherwise just the first label we see. */
1503 FOR_EACH_BB_FN (bb
, cfun
)
1505 gimple_stmt_iterator i
;
1507 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1510 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1515 label
= gimple_label_label (label_stmt
);
1517 /* If we have not yet seen a label for the current block,
1518 remember this one and see if there are more labels. */
1519 if (!label_for_bb
[bb
->index
].label
)
1521 label_for_bb
[bb
->index
].label
= label
;
1525 /* If we did see a label for the current block already, but it
1526 is an artificially created label, replace it if the current
1527 label is a user defined label. */
1528 if (!DECL_ARTIFICIAL (label
)
1529 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1531 label_for_bb
[bb
->index
].label
= label
;
1537 /* Now redirect all jumps/branches to the selected label.
1538 First do so for each block ending in a control statement. */
1539 FOR_EACH_BB_FN (bb
, cfun
)
1541 gimple
*stmt
= last_stmt (bb
);
1542 tree label
, new_label
;
1547 switch (gimple_code (stmt
))
1551 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
1552 label
= gimple_cond_true_label (cond_stmt
);
1555 new_label
= main_block_label (label
);
1556 if (new_label
!= label
)
1557 gimple_cond_set_true_label (cond_stmt
, new_label
);
1560 label
= gimple_cond_false_label (cond_stmt
);
1563 new_label
= main_block_label (label
);
1564 if (new_label
!= label
)
1565 gimple_cond_set_false_label (cond_stmt
, new_label
);
1572 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
1573 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
1575 /* Replace all destination labels. */
1576 for (i
= 0; i
< n
; ++i
)
1578 tree case_label
= gimple_switch_label (switch_stmt
, i
);
1579 label
= CASE_LABEL (case_label
);
1580 new_label
= main_block_label (label
);
1581 if (new_label
!= label
)
1582 CASE_LABEL (case_label
) = new_label
;
1589 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1590 int i
, n
= gimple_asm_nlabels (asm_stmt
);
1592 for (i
= 0; i
< n
; ++i
)
1594 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
1595 tree label
= main_block_label (TREE_VALUE (cons
));
1596 TREE_VALUE (cons
) = label
;
1601 /* We have to handle gotos until they're removed, and we don't
1602 remove them until after we've created the CFG edges. */
1604 if (!computed_goto_p (stmt
))
1606 ggoto
*goto_stmt
= as_a
<ggoto
*> (stmt
);
1607 label
= gimple_goto_dest (goto_stmt
);
1608 new_label
= main_block_label (label
);
1609 if (new_label
!= label
)
1610 gimple_goto_set_dest (goto_stmt
, new_label
);
1614 case GIMPLE_TRANSACTION
:
1616 gtransaction
*txn
= as_a
<gtransaction
*> (stmt
);
1618 label
= gimple_transaction_label_norm (txn
);
1621 new_label
= main_block_label (label
);
1622 if (new_label
!= label
)
1623 gimple_transaction_set_label_norm (txn
, new_label
);
1626 label
= gimple_transaction_label_uninst (txn
);
1629 new_label
= main_block_label (label
);
1630 if (new_label
!= label
)
1631 gimple_transaction_set_label_uninst (txn
, new_label
);
1634 label
= gimple_transaction_label_over (txn
);
1637 new_label
= main_block_label (label
);
1638 if (new_label
!= label
)
1639 gimple_transaction_set_label_over (txn
, new_label
);
1649 /* Do the same for the exception region tree labels. */
1650 cleanup_dead_labels_eh ();
1652 /* Finally, purge dead labels. All user-defined labels and labels that
1653 can be the target of non-local gotos and labels which have their
1654 address taken are preserved. */
1655 FOR_EACH_BB_FN (bb
, cfun
)
1657 gimple_stmt_iterator i
;
1658 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1660 if (!label_for_this_bb
)
1663 /* If the main label of the block is unused, we may still remove it. */
1664 if (!label_for_bb
[bb
->index
].used
)
1665 label_for_this_bb
= NULL
;
1667 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1670 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1675 label
= gimple_label_label (label_stmt
);
1677 if (label
== label_for_this_bb
1678 || !DECL_ARTIFICIAL (label
)
1679 || DECL_NONLOCAL (label
)
1680 || FORCED_LABEL (label
))
1683 gsi_remove (&i
, true);
1687 free (label_for_bb
);
1690 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1691 the ones jumping to the same label.
1692 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1695 group_case_labels_stmt (gswitch
*stmt
)
1697 int old_size
= gimple_switch_num_labels (stmt
);
1698 int i
, next_index
, new_size
;
1699 basic_block default_bb
= NULL
;
1701 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1703 /* Look for possible opportunities to merge cases. */
1705 while (i
< old_size
)
1707 tree base_case
, base_high
;
1708 basic_block base_bb
;
1710 base_case
= gimple_switch_label (stmt
, i
);
1712 gcc_assert (base_case
);
1713 base_bb
= label_to_block (CASE_LABEL (base_case
));
1715 /* Discard cases that have the same destination as the default case or
1716 whose destiniation blocks have already been removed as unreachable. */
1717 if (base_bb
== NULL
|| base_bb
== default_bb
)
1723 base_high
= CASE_HIGH (base_case
)
1724 ? CASE_HIGH (base_case
)
1725 : CASE_LOW (base_case
);
1728 /* Try to merge case labels. Break out when we reach the end
1729 of the label vector or when we cannot merge the next case
1730 label with the current one. */
1731 while (next_index
< old_size
)
1733 tree merge_case
= gimple_switch_label (stmt
, next_index
);
1734 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1735 wide_int bhp1
= wi::to_wide (base_high
) + 1;
1737 /* Merge the cases if they jump to the same place,
1738 and their ranges are consecutive. */
1739 if (merge_bb
== base_bb
1740 && wi::to_wide (CASE_LOW (merge_case
)) == bhp1
)
1742 base_high
= CASE_HIGH (merge_case
) ?
1743 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1744 CASE_HIGH (base_case
) = base_high
;
1751 /* Discard cases that have an unreachable destination block. */
1752 if (EDGE_COUNT (base_bb
->succs
) == 0
1753 && gimple_seq_unreachable_p (bb_seq (base_bb
))
1754 /* Don't optimize this if __builtin_unreachable () is the
1755 implicitly added one by the C++ FE too early, before
1756 -Wreturn-type can be diagnosed. We'll optimize it later
1757 during switchconv pass or any other cfg cleanup. */
1758 && (gimple_in_ssa_p (cfun
)
1759 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb
)))
1760 != BUILTINS_LOCATION
)))
1762 edge base_edge
= find_edge (gimple_bb (stmt
), base_bb
);
1763 if (base_edge
!= NULL
)
1764 remove_edge_and_dominated_blocks (base_edge
);
1770 gimple_switch_set_label (stmt
, new_size
,
1771 gimple_switch_label (stmt
, i
));
1776 gcc_assert (new_size
<= old_size
);
1778 if (new_size
< old_size
)
1779 gimple_switch_set_num_labels (stmt
, new_size
);
1781 return new_size
< old_size
;
1784 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1785 and scan the sorted vector of cases. Combine the ones jumping to the
1789 group_case_labels (void)
1792 bool changed
= false;
1794 FOR_EACH_BB_FN (bb
, cfun
)
1796 gimple
*stmt
= last_stmt (bb
);
1797 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1798 changed
|= group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1804 /* Checks whether we can merge block B into block A. */
1807 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1811 if (!single_succ_p (a
))
1814 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1817 if (single_succ (a
) != b
)
1820 if (!single_pred_p (b
))
1823 if (a
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
1824 || b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1827 /* If A ends by a statement causing exceptions or something similar, we
1828 cannot merge the blocks. */
1829 stmt
= last_stmt (a
);
1830 if (stmt
&& stmt_ends_bb_p (stmt
))
1833 /* Do not allow a block with only a non-local label to be merged. */
1835 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1836 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
1839 /* Examine the labels at the beginning of B. */
1840 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);
1844 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1847 lab
= gimple_label_label (label_stmt
);
1849 /* Do not remove user forced labels or for -O0 any user labels. */
1850 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1854 /* Protect simple loop latches. We only want to avoid merging
1855 the latch with the loop header or with a block in another
1856 loop in this case. */
1858 && b
->loop_father
->latch
== b
1859 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES
)
1860 && (b
->loop_father
->header
== a
1861 || b
->loop_father
!= a
->loop_father
))
1864 /* It must be possible to eliminate all phi nodes in B. If ssa form
1865 is not up-to-date and a name-mapping is registered, we cannot eliminate
1866 any phis. Symbols marked for renaming are never a problem though. */
1867 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);
1870 gphi
*phi
= gsi
.phi ();
1871 /* Technically only new names matter. */
1872 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1876 /* When not optimizing, don't merge if we'd lose goto_locus. */
1878 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1880 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1881 gimple_stmt_iterator prev
, next
;
1882 prev
= gsi_last_nondebug_bb (a
);
1883 next
= gsi_after_labels (b
);
1884 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1885 gsi_next_nondebug (&next
);
1886 if ((gsi_end_p (prev
)
1887 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1888 && (gsi_end_p (next
)
1889 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1896 /* Replaces all uses of NAME by VAL. */
1899 replace_uses_by (tree name
, tree val
)
1901 imm_use_iterator imm_iter
;
1906 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1908 /* Mark the block if we change the last stmt in it. */
1909 if (cfgcleanup_altered_bbs
1910 && stmt_ends_bb_p (stmt
))
1911 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1913 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1915 replace_exp (use
, val
);
1917 if (gimple_code (stmt
) == GIMPLE_PHI
)
1919 e
= gimple_phi_arg_edge (as_a
<gphi
*> (stmt
),
1920 PHI_ARG_INDEX_FROM_USE (use
));
1921 if (e
->flags
& EDGE_ABNORMAL
1922 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
))
1924 /* This can only occur for virtual operands, since
1925 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1926 would prevent replacement. */
1927 gcc_checking_assert (virtual_operand_p (name
));
1928 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1933 if (gimple_code (stmt
) != GIMPLE_PHI
)
1935 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1936 gimple
*orig_stmt
= stmt
;
1939 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1940 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1941 only change sth from non-invariant to invariant, and only
1942 when propagating constants. */
1943 if (is_gimple_min_invariant (val
))
1944 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1946 tree op
= gimple_op (stmt
, i
);
1947 /* Operands may be empty here. For example, the labels
1948 of a GIMPLE_COND are nulled out following the creation
1949 of the corresponding CFG edges. */
1950 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1951 recompute_tree_invariant_for_addr_expr (op
);
1954 if (fold_stmt (&gsi
))
1955 stmt
= gsi_stmt (gsi
);
1957 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1958 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1964 gcc_checking_assert (has_zero_uses (name
));
1966 /* Also update the trees stored in loop structures. */
1971 FOR_EACH_LOOP (loop
, 0)
1973 substitute_in_loop_info (loop
, name
, val
);
1978 /* Merge block B into block A. */
1981 gimple_merge_blocks (basic_block a
, basic_block b
)
1983 gimple_stmt_iterator last
, gsi
;
1987 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1989 /* Remove all single-valued PHI nodes from block B of the form
1990 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1991 gsi
= gsi_last_bb (a
);
1992 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1994 gimple
*phi
= gsi_stmt (psi
);
1995 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1997 bool may_replace_uses
= (virtual_operand_p (def
)
1998 || may_propagate_copy (def
, use
));
2000 /* In case we maintain loop closed ssa form, do not propagate arguments
2001 of loop exit phi nodes. */
2003 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
2004 && !virtual_operand_p (def
)
2005 && TREE_CODE (use
) == SSA_NAME
2006 && a
->loop_father
!= b
->loop_father
)
2007 may_replace_uses
= false;
2009 if (!may_replace_uses
)
2011 gcc_assert (!virtual_operand_p (def
));
2013 /* Note that just emitting the copies is fine -- there is no problem
2014 with ordering of phi nodes. This is because A is the single
2015 predecessor of B, therefore results of the phi nodes cannot
2016 appear as arguments of the phi nodes. */
2017 copy
= gimple_build_assign (def
, use
);
2018 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
2019 remove_phi_node (&psi
, false);
2023 /* If we deal with a PHI for virtual operands, we can simply
2024 propagate these without fussing with folding or updating
2026 if (virtual_operand_p (def
))
2028 imm_use_iterator iter
;
2029 use_operand_p use_p
;
2032 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
2033 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
2034 SET_USE (use_p
, use
);
2036 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
2037 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
2040 replace_uses_by (def
, use
);
2042 remove_phi_node (&psi
, true);
2046 /* Ensure that B follows A. */
2047 move_block_after (b
, a
);
2049 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
2050 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
2052 /* Remove labels from B and set gimple_bb to A for other statements. */
2053 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
2055 gimple
*stmt
= gsi_stmt (gsi
);
2056 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2058 tree label
= gimple_label_label (label_stmt
);
2061 gsi_remove (&gsi
, false);
2063 /* Now that we can thread computed gotos, we might have
2064 a situation where we have a forced label in block B
2065 However, the label at the start of block B might still be
2066 used in other ways (think about the runtime checking for
2067 Fortran assigned gotos). So we can not just delete the
2068 label. Instead we move the label to the start of block A. */
2069 if (FORCED_LABEL (label
))
2071 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
2072 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
2074 /* Other user labels keep around in a form of a debug stmt. */
2075 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
2077 gimple
*dbg
= gimple_build_debug_bind (label
,
2080 gimple_debug_bind_reset_value (dbg
);
2081 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
2084 lp_nr
= EH_LANDING_PAD_NR (label
);
2087 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
2088 lp
->post_landing_pad
= NULL
;
2093 gimple_set_bb (stmt
, a
);
2098 /* When merging two BBs, if their counts are different, the larger count
2099 is selected as the new bb count. This is to handle inconsistent
2101 if (a
->loop_father
== b
->loop_father
)
2103 a
->count
= a
->count
.merge (b
->count
);
2106 /* Merge the sequences. */
2107 last
= gsi_last_bb (a
);
2108 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
2109 set_bb_seq (b
, NULL
);
2111 if (cfgcleanup_altered_bbs
)
2112 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
2116 /* Return the one of two successors of BB that is not reachable by a
2117 complex edge, if there is one. Else, return BB. We use
2118 this in optimizations that use post-dominators for their heuristics,
2119 to catch the cases in C++ where function calls are involved. */
2122 single_noncomplex_succ (basic_block bb
)
2125 if (EDGE_COUNT (bb
->succs
) != 2)
2128 e0
= EDGE_SUCC (bb
, 0);
2129 e1
= EDGE_SUCC (bb
, 1);
2130 if (e0
->flags
& EDGE_COMPLEX
)
2132 if (e1
->flags
& EDGE_COMPLEX
)
2138 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2141 notice_special_calls (gcall
*call
)
2143 int flags
= gimple_call_flags (call
);
2145 if (flags
& ECF_MAY_BE_ALLOCA
)
2146 cfun
->calls_alloca
= true;
2147 if (flags
& ECF_RETURNS_TWICE
)
2148 cfun
->calls_setjmp
= true;
2152 /* Clear flags set by notice_special_calls. Used by dead code removal
2153 to update the flags. */
2156 clear_special_calls (void)
2158 cfun
->calls_alloca
= false;
2159 cfun
->calls_setjmp
= false;
2162 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2165 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
2167 /* Since this block is no longer reachable, we can just delete all
2168 of its PHI nodes. */
2169 remove_phi_nodes (bb
);
2171 /* Remove edges to BB's successors. */
2172 while (EDGE_COUNT (bb
->succs
) > 0)
2173 remove_edge (EDGE_SUCC (bb
, 0));
2177 /* Remove statements of basic block BB. */
2180 remove_bb (basic_block bb
)
2182 gimple_stmt_iterator i
;
2186 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
2187 if (dump_flags
& TDF_DETAILS
)
2189 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
2190 fprintf (dump_file
, "\n");
2196 struct loop
*loop
= bb
->loop_father
;
2198 /* If a loop gets removed, clean up the information associated
2200 if (loop
->latch
== bb
2201 || loop
->header
== bb
)
2202 free_numbers_of_iterations_estimates (loop
);
2205 /* Remove all the instructions in the block. */
2206 if (bb_seq (bb
) != NULL
)
2208 /* Walk backwards so as to get a chance to substitute all
2209 released DEFs into debug stmts. See
2210 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2212 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2214 gimple
*stmt
= gsi_stmt (i
);
2215 glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
);
2217 && (FORCED_LABEL (gimple_label_label (label_stmt
))
2218 || DECL_NONLOCAL (gimple_label_label (label_stmt
))))
2221 gimple_stmt_iterator new_gsi
;
2223 /* A non-reachable non-local label may still be referenced.
2224 But it no longer needs to carry the extra semantics of
2226 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
2228 DECL_NONLOCAL (gimple_label_label (label_stmt
)) = 0;
2229 FORCED_LABEL (gimple_label_label (label_stmt
)) = 1;
2232 new_bb
= bb
->prev_bb
;
2233 new_gsi
= gsi_start_bb (new_bb
);
2234 gsi_remove (&i
, false);
2235 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2239 /* Release SSA definitions. */
2240 release_defs (stmt
);
2241 gsi_remove (&i
, true);
2245 i
= gsi_last_bb (bb
);
2251 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2252 bb
->il
.gimple
.seq
= NULL
;
2253 bb
->il
.gimple
.phi_nodes
= NULL
;
2257 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2258 predicate VAL, return the edge that will be taken out of the block.
2259 If VAL does not match a unique edge, NULL is returned. */
2262 find_taken_edge (basic_block bb
, tree val
)
2266 stmt
= last_stmt (bb
);
2268 gcc_assert (is_ctrl_stmt (stmt
));
2270 if (gimple_code (stmt
) == GIMPLE_COND
)
2271 return find_taken_edge_cond_expr (bb
, val
);
2273 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2274 return find_taken_edge_switch_expr (as_a
<gswitch
*> (stmt
), bb
, val
);
2276 if (computed_goto_p (stmt
))
2278 /* Only optimize if the argument is a label, if the argument is
2279 not a label then we can not construct a proper CFG.
2281 It may be the case that we only need to allow the LABEL_REF to
2282 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2283 appear inside a LABEL_EXPR just to be safe. */
2285 && (TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2286 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2287 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2294 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2295 statement, determine which of the outgoing edges will be taken out of the
2296 block. Return NULL if either edge may be taken. */
2299 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2304 dest
= label_to_block (val
);
2307 e
= find_edge (bb
, dest
);
2308 gcc_assert (e
!= NULL
);
2314 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2315 statement, determine which of the two edges will be taken out of the
2316 block. Return NULL if either edge may be taken. */
2319 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2321 edge true_edge
, false_edge
;
2324 || TREE_CODE (val
) != INTEGER_CST
)
2327 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2329 return (integer_zerop (val
) ? false_edge
: true_edge
);
2332 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2333 statement, determine which edge will be taken out of the block. Return
2334 NULL if any edge may be taken. */
2337 find_taken_edge_switch_expr (gswitch
*switch_stmt
, basic_block bb
,
2340 basic_block dest_bb
;
2344 if (gimple_switch_num_labels (switch_stmt
) == 1)
2345 taken_case
= gimple_switch_default_label (switch_stmt
);
2346 else if (! val
|| TREE_CODE (val
) != INTEGER_CST
)
2349 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2350 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2352 e
= find_edge (bb
, dest_bb
);
2358 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2359 We can make optimal use here of the fact that the case labels are
2360 sorted: We can do a binary search for a case matching VAL. */
2363 find_case_label_for_value (gswitch
*switch_stmt
, tree val
)
2365 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2366 tree default_case
= gimple_switch_default_label (switch_stmt
);
2368 for (low
= 0, high
= n
; high
- low
> 1; )
2370 size_t i
= (high
+ low
) / 2;
2371 tree t
= gimple_switch_label (switch_stmt
, i
);
2374 /* Cache the result of comparing CASE_LOW and val. */
2375 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2382 if (CASE_HIGH (t
) == NULL
)
2384 /* A singe-valued case label. */
2390 /* A case range. We can only handle integer ranges. */
2391 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2396 return default_case
;
2400 /* Dump a basic block on stderr. */
2403 gimple_debug_bb (basic_block bb
)
2405 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2409 /* Dump basic block with index N on stderr. */
2412 gimple_debug_bb_n (int n
)
2414 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2415 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2419 /* Dump the CFG on stderr.
2421 FLAGS are the same used by the tree dumping functions
2422 (see TDF_* in dumpfile.h). */
2425 gimple_debug_cfg (dump_flags_t flags
)
2427 gimple_dump_cfg (stderr
, flags
);
2431 /* Dump the program showing basic block boundaries on the given FILE.
2433 FLAGS are the same used by the tree dumping functions (see TDF_* in
2437 gimple_dump_cfg (FILE *file
, dump_flags_t flags
)
2439 if (flags
& TDF_DETAILS
)
2441 dump_function_header (file
, current_function_decl
, flags
);
2442 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2443 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2444 last_basic_block_for_fn (cfun
));
2446 brief_dump_cfg (file
, flags
);
2447 fprintf (file
, "\n");
2450 if (flags
& TDF_STATS
)
2451 dump_cfg_stats (file
);
2453 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2457 /* Dump CFG statistics on FILE. */
2460 dump_cfg_stats (FILE *file
)
2462 static long max_num_merged_labels
= 0;
2463 unsigned long size
, total
= 0;
2466 const char * const fmt_str
= "%-30s%-13s%12s\n";
2467 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2468 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2469 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2470 const char *funcname
= current_function_name ();
2472 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2474 fprintf (file
, "---------------------------------------------------------\n");
2475 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2476 fprintf (file
, fmt_str
, "", " instances ", "used ");
2477 fprintf (file
, "---------------------------------------------------------\n");
2479 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2481 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2482 SCALE (size
), LABEL (size
));
2485 FOR_EACH_BB_FN (bb
, cfun
)
2486 num_edges
+= EDGE_COUNT (bb
->succs
);
2487 size
= num_edges
* sizeof (struct edge_def
);
2489 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2491 fprintf (file
, "---------------------------------------------------------\n");
2492 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2494 fprintf (file
, "---------------------------------------------------------\n");
2495 fprintf (file
, "\n");
2497 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2498 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2500 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2501 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2503 fprintf (file
, "\n");
2507 /* Dump CFG statistics on stderr. Keep extern so that it's always
2508 linked in the final executable. */
2511 debug_cfg_stats (void)
/* Thin debugger-callable wrapper: same report as dump_cfg_stats, fixed
   to the stderr stream.  */
2513 dump_cfg_stats (stderr
);
2516 /*---------------------------------------------------------------------------
2517 Miscellaneous helpers
2518 ---------------------------------------------------------------------------*/
2520 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2521 flow. Transfers of control flow associated with EH are excluded. */
2524 call_can_make_abnormal_goto (gimple
*t
)
2526 /* If the function has no non-local labels, then a call cannot make an
2527 abnormal transfer of control. */
/* Both per-function flags live on cfun; either one being set means some
   call in this function may reach a nonlocal label / longjmp back.  */
2528 if (!cfun
->has_nonlocal_label
2529 && !cfun
->calls_setjmp
)
2532 /* Likewise if the call has no side effects. */
2533 if (!gimple_has_side_effects (t
))
2536 /* Likewise if the called function is leaf. */
2537 if (gimple_call_flags (t
) & ECF_LEAF
)
2544 /* Return true if T can make an abnormal transfer of control flow.
2545 Transfers of control flow associated with EH are excluded. */
2548 stmt_can_make_abnormal_goto (gimple
*t
)
/* Only two statement kinds can go abnormal: computed gotos, and calls
   (which defer to call_can_make_abnormal_goto for the detailed test).  */
2550 if (computed_goto_p (t
))
2552 if (is_gimple_call (t
))
2553 return call_can_make_abnormal_goto (t
);
2558 /* Return true if T represents a stmt that always transfers control. */
2561 is_ctrl_stmt (gimple
*t
)
/* Dispatch on the gimple statement code; the case list (elided in this
   extract) enumerates the unconditional control-transfer codes.  */
2563 switch (gimple_code (t
))
2577 /* Return true if T is a statement that may alter the flow of control
2578 (e.g., a call to a non-returning function). */
2581 is_ctrl_altering_stmt (gimple
*t
)
2585 switch (gimple_code (t
))
2588 /* Per stmt call flag indicates whether the call could alter
2590 if (gimple_call_ctrl_altering_p (t
))
2594 case GIMPLE_EH_DISPATCH
:
2595 /* EH_DISPATCH branches to the individual catch handlers at
2596 this level of a try or allowed-exceptions region. It can
2597 fallthru to the next statement as well. */
/* An asm with any target labels (asm goto) can branch.  */
2601 if (gimple_asm_nlabels (as_a
<gasm
*> (t
)) > 0)
2606 /* OpenMP directives alter control flow. */
2609 case GIMPLE_TRANSACTION
:
2610 /* A transaction start alters control flow. */
2617 /* If a statement can throw, it alters control flow. */
2618 return stmt_can_throw_internal (t
);
2622 /* Return true if T is a simple local goto. */
2625 simple_goto_p (gimple
*t
)
/* "Simple" means a GIMPLE_GOTO whose destination is a plain LABEL_DECL,
   i.e. not a computed goto through a variable.  */
2627 return (gimple_code (t
) == GIMPLE_GOTO
2628 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2632 /* Return true if STMT should start a new basic block. PREV_STMT is
2633 the statement preceding STMT. It is used when STMT is a label or a
2634 case label. Labels should only start a new basic block if their
2635 previous statement wasn't a label. Otherwise, sequence of labels
2636 would generate unnecessary basic blocks that only contain a single
2640 stmt_starts_bb_p (gimple
*stmt
, gimple
*prev_stmt
)
2645 /* Labels start a new basic block only if the preceding statement
2646 wasn't a label of the same type. This prevents the creation of
2647 consecutive blocks that have nothing but a single label. */
2648 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2650 /* Nonlocal and computed GOTO targets always start a new block. */
2651 if (DECL_NONLOCAL (gimple_label_label (label_stmt
))
2652 || FORCED_LABEL (gimple_label_label (label_stmt
)))
2655 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
/* A nonlocal label on the previous statement forces a block break
   even for an otherwise mergeable label run.  */
2657 if (DECL_NONLOCAL (gimple_label_label (
2658 as_a
<glabel
*> (prev_stmt
))))
/* Statistics counter consumed by dump_cfg_stats ("Coalesced label
   blocks").  */
2661 cfg_stats
.num_merged_labels
++;
2667 else if (gimple_code (stmt
) == GIMPLE_CALL
)
2669 if (gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2670 /* setjmp acts similar to a nonlocal GOTO target and thus should
2671 start a new block. */
2673 if (gimple_call_internal_p (stmt
, IFN_PHI
)
2675 && gimple_code (prev_stmt
) != GIMPLE_LABEL
2676 && (gimple_code (prev_stmt
) != GIMPLE_CALL
2677 || ! gimple_call_internal_p (prev_stmt
, IFN_PHI
)))
2678 /* PHI nodes start a new block unless preceeded by a label
2687 /* Return true if T should end a basic block. */
2690 stmt_ends_bb_p (gimple
*t
)
/* A block ends at any statement that always transfers control or that
   may alter control flow (calls, throws, asm goto, ...).  */
2692 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2695 /* Remove block annotations and other data structures. */
2698 delete_tree_cfg_annotations (struct function
*fn
)
/* Releases FN's label-to-block map; the vector pointer in FN is freed.  */
2700 vec_free (label_to_block_map_for_fn (fn
));
2703 /* Return the virtual phi in BB. */
2706 get_virtual_phi (basic_block bb
)
/* Scan BB's PHI nodes for the one whose result is a virtual operand
   (there is at most one virtual PHI per block).  */
2708 for (gphi_iterator gsi
= gsi_start_phis (bb
);
2712 gphi
*phi
= gsi
.phi ();
2714 if (virtual_operand_p (PHI_RESULT (phi
)))
2721 /* Return the first statement in basic block BB. */
2724 first_stmt (basic_block bb
)
2726 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2727 gimple
*stmt
= NULL
;
/* Skip leading debug statements; STMT tracks the last statement looked
   at so the first non-debug one is what remains after the loop.  */
2729 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2737 /* Return the first non-label statement in basic block BB. */
2740 first_non_label_stmt (basic_block bb
)
2742 gimple_stmt_iterator i
= gsi_start_bb (bb
);
/* Advance past the label run at the head of the block.  */
2743 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
/* NULL when the block contains nothing but labels.  */
2745 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2748 /* Return the last statement in basic block BB. */
2751 last_stmt (basic_block bb
)
2753 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2754 gimple
*stmt
= NULL
;
/* Walk backwards over trailing debug statements, mirroring first_stmt.  */
2756 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2764 /* Return the last statement of an otherwise empty block. Return NULL
2765 if the block is totally empty, or if it contains more than one
2769 last_and_only_stmt (basic_block bb
)
2771 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2772 gimple
*last
, *prev
;
2777 last
= gsi_stmt (i
);
/* Step back to see whether anything precedes LAST besides labels.  */
2778 gsi_prev_nondebug (&i
);
2782 /* Empty statements should no longer appear in the instruction stream.
2783 Everything that might have appeared before should be deleted by
2784 remove_useless_stmts, and the optimizers should just gsi_remove
2785 instead of smashing with build_empty_stmt.
2787 Thus the only thing that should appear here in a block containing
2788 one executable statement is a label. */
2789 prev
= gsi_stmt (i
);
2790 if (gimple_code (prev
) == GIMPLE_LABEL
)
2796 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2799 reinstall_phi_args (edge new_edge
, edge old_edge
)
/* The pending argument map was queued on OLD_EDGE by edge redirection;
   empty when no PHI args needed saving.  */
2805 vec
<edge_var_map
> *v
= redirect_edge_var_map_vector (old_edge
);
/* Walk the queued (result, def) pairs in lockstep with the PHI nodes of
   NEW_EDGE's destination block; order is expected to match.  */
2809 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2810 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2811 i
++, gsi_next (&phis
))
2813 gphi
*phi
= phis
.phi ();
2814 tree result
= redirect_edge_var_map_result (vm
);
2815 tree arg
= redirect_edge_var_map_def (vm
);
/* Sanity: the queued entry must correspond to this PHI.  */
2817 gcc_assert (result
== gimple_phi_result (phi
));
2819 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
/* Drop the queued map now that the args are installed.  */
2822 redirect_edge_var_map_clear (old_edge
);
2825 /* Returns the basic block after which the new basic block created
2826 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2827 near its "logical" location. This is of most help to humans looking
2828 at debugging dumps. */
2831 split_edge_bb_loc (edge edge_in
)
2833 basic_block dest
= edge_in
->dest
;
2834 basic_block dest_prev
= dest
->prev_bb
;
/* If the block laid out just before DEST falls through to it with a
   normal edge, keep that pair adjacent and place the new block after
   the edge's source instead.  */
2838 edge e
= find_edge (dest_prev
, dest
);
2839 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2840 return edge_in
->src
;
2845 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2846 Abort on abnormal edges. */
2849 gimple_split_edge (edge edge_in
)
2851 basic_block new_bb
, after_bb
, dest
;
2854 /* Abnormal edges cannot be split. */
2855 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2857 dest
= edge_in
->dest
;
/* Choose placement of the new block for readable dumps.  */
2859 after_bb
= split_edge_bb_loc (edge_in
);
2861 new_bb
= create_empty_bb (after_bb
);
/* The new block inherits the execution count of the split edge.  */
2862 new_bb
->count
= edge_in
->count ();
2864 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2865 gcc_assert (e
== edge_in
);
/* The new block falls through unconditionally to the old destination.  */
2867 new_edge
= make_single_succ_edge (new_bb
, dest
, EDGE_FALLTHRU
);
/* Move PHI arguments queued by the redirection onto the new edge.  */
2868 reinstall_phi_args (new_edge
, e
);
2874 /* Verify properties of the address expression T with base object BASE. */
2877 verify_address (tree t
, tree base
)
2880 bool old_side_effects
;
2882 bool new_side_effects
;
/* Snapshot the flags, recompute them, and compare: a mismatch means a
   transformation changed T without updating its invariant bits.  */
2884 old_constant
= TREE_CONSTANT (t
);
2885 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2887 recompute_tree_invariant_for_addr_expr (t
);
2888 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2889 new_constant
= TREE_CONSTANT (t
);
2891 if (old_constant
!= new_constant
)
2893 error ("constant not recomputed when ADDR_EXPR changed");
2896 if (old_side_effects
!= new_side_effects
)
2898 error ("side effects not recomputed when ADDR_EXPR changed");
2903 || TREE_CODE (base
) == PARM_DECL
2904 || TREE_CODE (base
) == RESULT_DECL
))
/* Taking the address of a decl is incompatible with treating it as a
   register-like object.  */
2907 if (DECL_GIMPLE_REG_P (base
))
2909 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2916 /* Callback for walk_tree, check that all elements with address taken are
2917 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2918 inside a PHI node. */
2921 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2928 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2929 #define CHECK_OP(N, MSG) \
2930 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2931 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
/* Large dispatch on the tree code of the expression being walked; each
   case below validates one GIMPLE IL invariant and reports via error ().  */
2933 switch (TREE_CODE (t
))
2936 if (SSA_NAME_IN_FREE_LIST (t
))
2938 error ("SSA name in freelist but still referenced");
/* Local decls must belong to the function being verified (or be
   file-scope / external).  */
2947 tree context
= decl_function_context (t
);
2948 if (context
!= cfun
->decl
2949 && !SCOPE_FILE_SCOPE_P (context
)
2951 && !DECL_EXTERNAL (t
))
2953 error ("Local declaration from a different function");
2960 error ("INDIRECT_REF in gimple IL");
/* MEM_REF: operand 0 must be a valid gimple address of pointer type,
   operand 1 a constant offset of pointer type.  */
2964 x
= TREE_OPERAND (t
, 0);
2965 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2966 || !is_gimple_mem_ref_addr (x
))
2968 error ("invalid first operand of MEM_REF");
2971 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2972 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2974 error ("invalid offset operand of MEM_REF");
2975 return TREE_OPERAND (t
, 1);
2977 if (TREE_CODE (x
) == ADDR_EXPR
)
2979 tree va
= verify_address (x
, TREE_OPERAND (x
, 0));
2982 x
= TREE_OPERAND (x
, 0);
/* Recurse into the address operand with this same callback.  */
2984 walk_tree (&x
, verify_expr
, data
, NULL
);
/* ASSERT_EXPR: a condition that folds to false can never hold.  */
2989 x
= fold (ASSERT_EXPR_COND (t
));
2990 if (x
== boolean_false_node
)
2992 error ("ASSERT_EXPR with an always-false condition");
2998 error ("MODIFY_EXPR not expected while having tuples");
3005 gcc_assert (is_gimple_address (t
));
3007 /* Skip any references (they will be checked when we recurse down the
3008 tree) and ensure that any variable used as a prefix is marked
3010 for (x
= TREE_OPERAND (t
, 0);
3011 handled_component_p (x
);
3012 x
= TREE_OPERAND (x
, 0))
3015 if ((tem
= verify_address (t
, x
)))
3019 || TREE_CODE (x
) == PARM_DECL
3020 || TREE_CODE (x
) == RESULT_DECL
))
3023 if (!TREE_ADDRESSABLE (x
))
3025 error ("address taken, but ADDRESSABLE bit not set");
/* COND_EXPR: the condition must be integral and a valid gimple
   conditional expression.  */
3033 x
= COND_EXPR_COND (t
);
3034 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
3036 error ("non-integral used in condition");
3039 if (!is_gimple_condexpr (x
))
3041 error ("invalid conditional operand");
3046 case NON_LVALUE_EXPR
:
3047 case TRUTH_NOT_EXPR
:
3051 case FIX_TRUNC_EXPR
:
3056 CHECK_OP (0, "invalid operand to unary operator");
3062 if (!is_gimple_reg_type (TREE_TYPE (t
)))
3064 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3068 if (TREE_CODE (t
) == BIT_FIELD_REF
)
3070 tree t0
= TREE_OPERAND (t
, 0);
3071 tree t1
= TREE_OPERAND (t
, 1);
3072 tree t2
= TREE_OPERAND (t
, 2);
/* T1 is the bit size, T2 the bit position; both must be unsigned
   host-wide-int constants of bitsizetype.  */
3073 if (!tree_fits_uhwi_p (t1
)
3074 || !tree_fits_uhwi_p (t2
)
3075 || !types_compatible_p (bitsizetype
, TREE_TYPE (t1
))
3076 || !types_compatible_p (bitsizetype
, TREE_TYPE (t2
)))
3078 error ("invalid position or size operand to BIT_FIELD_REF");
3081 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
3082 && (TYPE_PRECISION (TREE_TYPE (t
))
3083 != tree_to_uhwi (t1
)))
3085 error ("integral result type precision does not match "
3086 "field size of BIT_FIELD_REF");
3089 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
3090 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
3091 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t
)))
3092 != tree_to_uhwi (t1
)))
3094 error ("mode size of non-integral result does not "
3095 "match field size of BIT_FIELD_REF");
/* The referenced range must lie within the base object for
   non-aggregate bases.  */
3098 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
3099 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
3100 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
3102 error ("position plus size exceeds size of referenced object in "
3107 t
= TREE_OPERAND (t
, 0);
3112 case ARRAY_RANGE_REF
:
3113 case VIEW_CONVERT_EXPR
:
3114 /* We have a nest of references. Verify that each of the operands
3115 that determine where to reference is either a constant or a variable,
3116 verify that the base is valid, and then show we've already checked
3118 while (handled_component_p (t
))
3120 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
3121 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3122 else if (TREE_CODE (t
) == ARRAY_REF
3123 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
3125 CHECK_OP (1, "invalid array index");
3126 if (TREE_OPERAND (t
, 2))
3127 CHECK_OP (2, "invalid array lower bound");
3128 if (TREE_OPERAND (t
, 3))
3129 CHECK_OP (3, "invalid array stride");
3131 else if (TREE_CODE (t
) == BIT_FIELD_REF
3132 || TREE_CODE (t
) == REALPART_EXPR
3133 || TREE_CODE (t
) == IMAGPART_EXPR
)
3135 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3140 t
= TREE_OPERAND (t
, 0);
/* After peeling all components, the base must be an invariant or a
   valid lvalue.  */
3143 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
3145 error ("invalid reference prefix");
3148 walk_tree (&t
, verify_expr
, data
, NULL
);
3153 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3154 POINTER_PLUS_EXPR. */
3155 if (POINTER_TYPE_P (TREE_TYPE (t
)))
3157 error ("invalid operand to plus/minus, type is a pointer");
3160 CHECK_OP (0, "invalid operand to binary operator");
3161 CHECK_OP (1, "invalid operand to binary operator");
3164 case POINTER_DIFF_EXPR
:
3165 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0)))
3166 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
3168 error ("invalid operand to pointer diff, operand is not a pointer");
/* The result of a pointer difference must be a signed integer type
   with the same precision as the pointer operands.  */
3171 if (TREE_CODE (TREE_TYPE (t
)) != INTEGER_TYPE
3172 || TYPE_UNSIGNED (TREE_TYPE (t
))
3173 || (TYPE_PRECISION (TREE_TYPE (t
))
3174 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t
, 0)))))
3176 error ("invalid type for pointer diff");
3179 CHECK_OP (0, "invalid operand to pointer diff");
3180 CHECK_OP (1, "invalid operand to pointer diff");
3183 case POINTER_PLUS_EXPR
:
3184 /* Check to make sure the first operand is a pointer or reference type. */
3185 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
3187 error ("invalid operand to pointer plus, first operand is not a pointer");
3190 /* Check to make sure the second operand is a ptrofftype. */
3191 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
3193 error ("invalid operand to pointer plus, second operand is not an "
3194 "integer type of appropriate width");
3204 case UNORDERED_EXPR
:
3213 case TRUNC_DIV_EXPR
:
3215 case FLOOR_DIV_EXPR
:
3216 case ROUND_DIV_EXPR
:
3217 case TRUNC_MOD_EXPR
:
3219 case FLOOR_MOD_EXPR
:
3220 case ROUND_MOD_EXPR
:
3222 case EXACT_DIV_EXPR
:
3232 CHECK_OP (0, "invalid operand to binary operator");
3233 CHECK_OP (1, "invalid operand to binary operator");
3237 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
3241 case CASE_LABEL_EXPR
:
3244 error ("invalid CASE_CHAIN");
3258 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3259 Returns true if there is an error, otherwise false. */
3262 verify_types_in_gimple_min_lval (tree expr
)
3266 if (is_gimple_id (expr
))
/* After the identifier case, only the two memory-reference codes are
   acceptable minimal lvalues.  */
3269 if (TREE_CODE (expr
) != TARGET_MEM_REF
3270 && TREE_CODE (expr
) != MEM_REF
)
3272 error ("invalid expression for min lvalue");
3276 /* TARGET_MEM_REFs are strange beasts. */
3277 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3280 op
= TREE_OPERAND (expr
, 0);
3281 if (!is_gimple_val (op
))
3283 error ("invalid operand in indirect reference");
3284 debug_generic_stmt (op
);
3287 /* Memory references now generally can involve a value conversion. */
3292 /* Verify if EXPR is a valid GIMPLE reference expression. If
3293 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3294 if there is an error, otherwise false. */
3297 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
/* Peel the reference chain outermost-first; each iteration checks the
   component EXPR against the type of its operand OP.  */
3299 while (handled_component_p (expr
))
3301 tree op
= TREE_OPERAND (expr
, 0);
3303 if (TREE_CODE (expr
) == ARRAY_REF
3304 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
/* Index, lower bound (operand 2) and stride (operand 3) must all be
   gimple values when present.  */
3306 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3307 || (TREE_OPERAND (expr
, 2)
3308 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3309 || (TREE_OPERAND (expr
, 3)
3310 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3312 error ("invalid operands to array reference");
3313 debug_generic_stmt (expr
);
3318 /* Verify if the reference array element types are compatible. */
3319 if (TREE_CODE (expr
) == ARRAY_REF
3320 && !useless_type_conversion_p (TREE_TYPE (expr
),
3321 TREE_TYPE (TREE_TYPE (op
))))
3323 error ("type mismatch in array reference");
3324 debug_generic_stmt (TREE_TYPE (expr
));
3325 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3328 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3329 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3330 TREE_TYPE (TREE_TYPE (op
))))
3332 error ("type mismatch in array range reference");
3333 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3334 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3338 if ((TREE_CODE (expr
) == REALPART_EXPR
3339 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3340 && !useless_type_conversion_p (TREE_TYPE (expr
),
3341 TREE_TYPE (TREE_TYPE (op
))))
3343 error ("type mismatch in real/imagpart reference");
3344 debug_generic_stmt (TREE_TYPE (expr
));
3345 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3349 if (TREE_CODE (expr
) == COMPONENT_REF
3350 && !useless_type_conversion_p (TREE_TYPE (expr
),
3351 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3353 error ("type mismatch in component reference");
3354 debug_generic_stmt (TREE_TYPE (expr
));
3355 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3359 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3361 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3362 that their operand is not an SSA name or an invariant when
3363 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3364 bug). Otherwise there is nothing to verify, gross mismatches at
3365 most invoke undefined behavior. */
3367 && (TREE_CODE (op
) == SSA_NAME
3368 || is_gimple_min_invariant (op
)))
3370 error ("conversion of an SSA_NAME on the left hand side");
3371 debug_generic_stmt (expr
);
3374 else if (TREE_CODE (op
) == SSA_NAME
3375 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3377 error ("conversion of register to a different size");
3378 debug_generic_stmt (expr
);
3381 else if (!handled_component_p (op
))
/* After the loop EXPR is the innermost base; check the two memory
   reference forms.  */
3388 if (TREE_CODE (expr
) == MEM_REF
)
3390 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3392 error ("invalid address operand in MEM_REF");
3393 debug_generic_stmt (expr
);
3396 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3397 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3399 error ("invalid offset operand in MEM_REF");
3400 debug_generic_stmt (expr
);
3404 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3406 if (!TMR_BASE (expr
)
3407 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3409 error ("invalid address operand in TARGET_MEM_REF");
3412 if (!TMR_OFFSET (expr
)
3413 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3414 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3416 error ("invalid offset operand in TARGET_MEM_REF");
3417 debug_generic_stmt (expr
);
/* Invariant bases need no min-lval check unless an lvalue is required.  */
3422 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3423 && verify_types_in_gimple_min_lval (expr
));
3426 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3427 list of pointer-to types that is trivially convertible to DEST. */
3430 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3434 if (!TYPE_POINTER_TO (src_obj
))
/* Walk the chain of pointer-to variants hanging off SRC_OBJ.  */
3437 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3438 if (useless_type_conversion_p (dest
, src
))
3444 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3445 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3448 valid_fixed_convert_types_p (tree type1
, tree type2
)
/* TYPE2 may be integral, floating, or itself fixed-point.  */
3450 return (FIXED_POINT_TYPE_P (type1
)
3451 && (INTEGRAL_TYPE_P (type2
)
3452 || SCALAR_FLOAT_TYPE_P (type2
)
3453 || FIXED_POINT_TYPE_P (type2
)));
3456 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3457 is a problem, otherwise false. */
3460 verify_gimple_call (gcall
*stmt
)
3462 tree fn
= gimple_call_fn (stmt
);
3463 tree fntype
, fndecl
;
/* Internal calls have no fn tree; having both is inconsistent.  */
3466 if (gimple_call_internal_p (stmt
))
3470 error ("gimple call has two targets");
3471 debug_generic_stmt (fn
);
3474 /* FIXME : for passing label as arg in internal fn PHI from GIMPLE FE*/
3475 else if (gimple_call_internal_fn (stmt
) == IFN_PHI
)
3484 error ("gimple call has no target");
3489 if (fn
&& !is_gimple_call_addr (fn
))
3491 error ("invalid function in gimple call");
3492 debug_generic_stmt (fn
);
/* The callee value must be a pointer to FUNCTION_TYPE or METHOD_TYPE.  */
3497 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3498 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3499 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3501 error ("non-function in gimple call");
3505 fndecl
= gimple_call_fndecl (stmt
);
/* Looping-const-or-pure requires the decl to also be pure or const.  */
3507 && TREE_CODE (fndecl
) == FUNCTION_DECL
3508 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3509 && !DECL_PURE_P (fndecl
)
3510 && !TREE_READONLY (fndecl
))
3512 error ("invalid pure const state for function");
3516 tree lhs
= gimple_call_lhs (stmt
);
3518 && (!is_gimple_lvalue (lhs
)
3519 || verify_types_in_gimple_reference (lhs
, true)))
3521 error ("invalid LHS in gimple call");
3525 if (gimple_call_ctrl_altering_p (stmt
)
3526 && gimple_call_noreturn_p (stmt
)
3527 && should_remove_lhs_p (lhs
))
3529 error ("LHS in noreturn call");
3533 fntype
= gimple_call_fntype (stmt
);
3536 && !useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (fntype
))
3537 /* ??? At least C++ misses conversions at assignments from
3538 void * call results.
3539 For now simply allow arbitrary pointer type conversions. */
3540 && !(POINTER_TYPE_P (TREE_TYPE (lhs
))
3541 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3543 error ("invalid conversion in gimple call");
3544 debug_generic_stmt (TREE_TYPE (lhs
));
3545 debug_generic_stmt (TREE_TYPE (fntype
));
3549 if (gimple_call_chain (stmt
)
3550 && !is_gimple_val (gimple_call_chain (stmt
)))
3552 error ("invalid static chain in gimple call");
3553 debug_generic_stmt (gimple_call_chain (stmt
));
3557 /* If there is a static chain argument, the call should either be
3558 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3559 if (gimple_call_chain (stmt
)
3561 && !DECL_STATIC_CHAIN (fndecl
))
3563 error ("static chain with function that doesn%'t use one");
3567 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3569 switch (DECL_FUNCTION_CODE (fndecl
))
3571 case BUILT_IN_UNREACHABLE
:
3573 if (gimple_call_num_args (stmt
) > 0)
3575 /* Built-in unreachable with parameters might not be caught by
3576 undefined behavior sanitizer. Front-ends do check users do not
3577 call them that way but we also produce calls to
3578 __builtin_unreachable internally, for example when IPA figures
3579 out a call cannot happen in a legal program. In such cases,
3580 we must make sure arguments are stripped off. */
3581 error ("__builtin_unreachable or __builtin_trap call with "
3591 /* ??? The C frontend passes unpromoted arguments in case it
3592 didn't see a function declaration before the call. So for now
3593 leave the call arguments mostly unverified. Once we gimplify
3594 unit-at-a-time we have a chance to fix this. */
/* Register-typed args must be gimple values; aggregates must be
   valid lvalues.  */
3596 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3598 tree arg
= gimple_call_arg (stmt
, i
);
3599 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3600 && !is_gimple_val (arg
))
3601 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3602 && !is_gimple_lvalue (arg
)))
3604 error ("invalid argument to gimple call");
3605 debug_generic_expr (arg
);
3613 /* Verifies the gimple comparison with the result type TYPE and
3614 the operands OP0 and OP1, comparison code is CODE. */
3617 verify_gimple_comparison (tree type
, tree op0
, tree op1
, enum tree_code code
)
3619 tree op0_type
= TREE_TYPE (op0
);
3620 tree op1_type
= TREE_TYPE (op1
);
3622 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3624 error ("invalid operands in gimple comparison");
3628 /* For comparisons we do not have the operations type as the
3629 effective type the comparison is carried out in. Instead
3630 we require that either the first operand is trivially
3631 convertible into the second, or the other way around.
3632 Because we special-case pointers to void we allow
3633 comparisons of pointers with the same mode as well. */
3634 if (!useless_type_conversion_p (op0_type
, op1_type
)
3635 && !useless_type_conversion_p (op1_type
, op0_type
)
3636 && (!POINTER_TYPE_P (op0_type
)
3637 || !POINTER_TYPE_P (op1_type
)
3638 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3640 error ("mismatching comparison operand types");
3641 debug_generic_expr (op0_type
);
3642 debug_generic_expr (op1_type
);
3646 /* The resulting type of a comparison may be an effective boolean type. */
3647 if (INTEGRAL_TYPE_P (type
)
3648 && (TREE_CODE (type
) == BOOLEAN_TYPE
3649 || TYPE_PRECISION (type
) == 1))
/* Vector operands producing a scalar boolean are only valid for
   equality tests or for boolean/integer vector operand types.  */
3651 if ((TREE_CODE (op0_type
) == VECTOR_TYPE
3652 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3653 && code
!= EQ_EXPR
&& code
!= NE_EXPR
3654 && !VECTOR_BOOLEAN_TYPE_P (op0_type
)
3655 && !VECTOR_INTEGER_TYPE_P (op0_type
))
3657 error ("unsupported operation or type for vector comparison"
3658 " returning a boolean");
3659 debug_generic_expr (op0_type
);
3660 debug_generic_expr (op1_type
);
3664 /* Or a boolean vector type with the same element count
3665 as the comparison operand types. */
3666 else if (TREE_CODE (type
) == VECTOR_TYPE
3667 && TREE_CODE (TREE_TYPE (type
)) == BOOLEAN_TYPE
)
3669 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3670 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3672 error ("non-vector operands in vector comparison");
3673 debug_generic_expr (op0_type
);
3674 debug_generic_expr (op1_type
);
3678 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
))
3680 error ("invalid vector comparison resulting type");
3681 debug_generic_expr (type
);
/* Neither boolean-like nor boolean-vector: reject.  */
3687 error ("bogus comparison result type");
3688 debug_generic_expr (type
);
3695 /* Verify a gimple assignment statement STMT with an unary rhs.
3696 Returns true if anything is wrong. */
3699 verify_gimple_assign_unary (gassign
*stmt
)
3701 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3702 tree lhs
= gimple_assign_lhs (stmt
);
3703 tree lhs_type
= TREE_TYPE (lhs
);
3704 tree rhs1
= gimple_assign_rhs1 (stmt
);
3705 tree rhs1_type
= TREE_TYPE (rhs1
);
3707 if (!is_gimple_reg (lhs
))
3709 error ("non-register as LHS of unary operation");
3713 if (!is_gimple_val (rhs1
))
3715 error ("invalid operand in unary operation");
3719 /* First handle conversions. */
3724 /* Allow conversions from pointer type to integral type only if
3725 there is no sign or zero extension involved.
3726 For targets were the precision of ptrofftype doesn't match that
3727 of pointers we need to allow arbitrary conversions to ptrofftype. */
3728 if ((POINTER_TYPE_P (lhs_type
)
3729 && INTEGRAL_TYPE_P (rhs1_type
))
3730 || (POINTER_TYPE_P (rhs1_type
)
3731 && INTEGRAL_TYPE_P (lhs_type
)
3732 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3733 || ptrofftype_p (sizetype
))))
3736 /* Allow conversion from integral to offset type and vice versa. */
3737 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3738 && INTEGRAL_TYPE_P (rhs1_type
))
3739 || (INTEGRAL_TYPE_P (lhs_type
)
3740 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3743 /* Otherwise assert we are converting between types of the
3745 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3747 error ("invalid types in nop conversion");
3748 debug_generic_expr (lhs_type
);
3749 debug_generic_expr (rhs1_type
);
3756 case ADDR_SPACE_CONVERT_EXPR
:
/* Both sides must be pointers and the address spaces must differ,
   otherwise a plain conversion should have been used.  */
3758 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3759 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3760 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3762 error ("invalid types in address space conversion");
3763 debug_generic_expr (lhs_type
);
3764 debug_generic_expr (rhs1_type
);
3771 case FIXED_CONVERT_EXPR
:
/* One of the two directions must be a valid fixed-point conversion.  */
3773 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3774 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3776 error ("invalid types in fixed-point conversion");
3777 debug_generic_expr (lhs_type
);
3778 debug_generic_expr (rhs1_type
);
/* int -> float, scalar or elementwise on vectors.  */
3787 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3788 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3789 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3791 error ("invalid types in conversion to floating point");
3792 debug_generic_expr (lhs_type
);
3793 debug_generic_expr (rhs1_type
);
3800 case FIX_TRUNC_EXPR
:
/* float -> int, scalar or elementwise on vectors.  */
3802 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3803 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3804 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3806 error ("invalid types in conversion to integer");
3807 debug_generic_expr (lhs_type
);
3808 debug_generic_expr (rhs1_type
);
3815 case VEC_UNPACK_HI_EXPR
:
3816 case VEC_UNPACK_LO_EXPR
:
3817 case VEC_UNPACK_FLOAT_HI_EXPR
:
3818 case VEC_UNPACK_FLOAT_LO_EXPR
:
3833 /* For the remaining codes assert there is no conversion involved. */
3834 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3836 error ("non-trivial conversion in unary operation");
3837 debug_generic_expr (lhs_type
);
3838 debug_generic_expr (rhs1_type
);
3845 /* Verify a gimple assignment statement STMT with a binary rhs.
3846 Returns true if anything is wrong. */
3849 verify_gimple_assign_binary (gassign
*stmt
)
3851 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3852 tree lhs
= gimple_assign_lhs (stmt
);
3853 tree lhs_type
= TREE_TYPE (lhs
);
3854 tree rhs1
= gimple_assign_rhs1 (stmt
);
3855 tree rhs1_type
= TREE_TYPE (rhs1
);
3856 tree rhs2
= gimple_assign_rhs2 (stmt
);
3857 tree rhs2_type
= TREE_TYPE (rhs2
);
3859 if (!is_gimple_reg (lhs
))
3861 error ("non-register as LHS of binary operation");
3865 if (!is_gimple_val (rhs1
)
3866 || !is_gimple_val (rhs2
))
3868 error ("invalid operands in binary operation");
3872 /* First handle operations that involve different types. */
3877 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3878 || !(INTEGRAL_TYPE_P (rhs1_type
)
3879 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3880 || !(INTEGRAL_TYPE_P (rhs2_type
)
3881 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3883 error ("type mismatch in complex expression");
3884 debug_generic_expr (lhs_type
);
3885 debug_generic_expr (rhs1_type
);
3886 debug_generic_expr (rhs2_type
);
3898 /* Shifts and rotates are ok on integral types, fixed point
3899 types and integer vector types. */
3900 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3901 && !FIXED_POINT_TYPE_P (rhs1_type
)
3902 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3903 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3904 || (!INTEGRAL_TYPE_P (rhs2_type
)
3905 /* Vector shifts of vectors are also ok. */
3906 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3907 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3908 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3909 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3910 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3912 error ("type mismatch in shift expression");
3913 debug_generic_expr (lhs_type
);
3914 debug_generic_expr (rhs1_type
);
3915 debug_generic_expr (rhs2_type
);
3922 case WIDEN_LSHIFT_EXPR
:
3924 if (!INTEGRAL_TYPE_P (lhs_type
)
3925 || !INTEGRAL_TYPE_P (rhs1_type
)
3926 || TREE_CODE (rhs2
) != INTEGER_CST
3927 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3929 error ("type mismatch in widening vector shift expression");
3930 debug_generic_expr (lhs_type
);
3931 debug_generic_expr (rhs1_type
);
3932 debug_generic_expr (rhs2_type
);
3939 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3940 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3942 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3943 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3944 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3945 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3946 || TREE_CODE (rhs2
) != INTEGER_CST
3947 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3948 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3950 error ("type mismatch in widening vector shift expression");
3951 debug_generic_expr (lhs_type
);
3952 debug_generic_expr (rhs1_type
);
3953 debug_generic_expr (rhs2_type
);
3963 tree lhs_etype
= lhs_type
;
3964 tree rhs1_etype
= rhs1_type
;
3965 tree rhs2_etype
= rhs2_type
;
3966 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3968 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3969 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3971 error ("invalid non-vector operands to vector valued plus");
3974 lhs_etype
= TREE_TYPE (lhs_type
);
3975 rhs1_etype
= TREE_TYPE (rhs1_type
);
3976 rhs2_etype
= TREE_TYPE (rhs2_type
);
3978 if (POINTER_TYPE_P (lhs_etype
)
3979 || POINTER_TYPE_P (rhs1_etype
)
3980 || POINTER_TYPE_P (rhs2_etype
))
3982 error ("invalid (pointer) operands to plus/minus");
3986 /* Continue with generic binary expression handling. */
3990 case POINTER_PLUS_EXPR
:
3992 if (!POINTER_TYPE_P (rhs1_type
)
3993 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3994 || !ptrofftype_p (rhs2_type
))
3996 error ("type mismatch in pointer plus expression");
3997 debug_generic_stmt (lhs_type
);
3998 debug_generic_stmt (rhs1_type
);
3999 debug_generic_stmt (rhs2_type
);
4006 case POINTER_DIFF_EXPR
:
4008 if (!POINTER_TYPE_P (rhs1_type
)
4009 || !POINTER_TYPE_P (rhs2_type
)
4010 /* Because we special-case pointers to void we allow difference
4011 of arbitrary pointers with the same mode. */
4012 || TYPE_MODE (rhs1_type
) != TYPE_MODE (rhs2_type
)
4013 || TREE_CODE (lhs_type
) != INTEGER_TYPE
4014 || TYPE_UNSIGNED (lhs_type
)
4015 || TYPE_PRECISION (lhs_type
) != TYPE_PRECISION (rhs1_type
))
4017 error ("type mismatch in pointer diff expression");
4018 debug_generic_stmt (lhs_type
);
4019 debug_generic_stmt (rhs1_type
);
4020 debug_generic_stmt (rhs2_type
);
4027 case TRUTH_ANDIF_EXPR
:
4028 case TRUTH_ORIF_EXPR
:
4029 case TRUTH_AND_EXPR
:
4031 case TRUTH_XOR_EXPR
:
4041 case UNORDERED_EXPR
:
4049 /* Comparisons are also binary, but the result type is not
4050 connected to the operand types. */
4051 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
, rhs_code
);
4053 case WIDEN_MULT_EXPR
:
4054 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
4056 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
4057 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
4059 case WIDEN_SUM_EXPR
:
4061 if (((TREE_CODE (rhs1_type
) != VECTOR_TYPE
4062 || TREE_CODE (lhs_type
) != VECTOR_TYPE
)
4063 && ((!INTEGRAL_TYPE_P (rhs1_type
)
4064 && !SCALAR_FLOAT_TYPE_P (rhs1_type
))
4065 || (!INTEGRAL_TYPE_P (lhs_type
)
4066 && !SCALAR_FLOAT_TYPE_P (lhs_type
))))
4067 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
4068 || (GET_MODE_SIZE (element_mode (rhs2_type
))
4069 < 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4071 error ("type mismatch in widening sum reduction");
4072 debug_generic_expr (lhs_type
);
4073 debug_generic_expr (rhs1_type
);
4074 debug_generic_expr (rhs2_type
);
4080 case VEC_WIDEN_MULT_HI_EXPR
:
4081 case VEC_WIDEN_MULT_LO_EXPR
:
4082 case VEC_WIDEN_MULT_EVEN_EXPR
:
4083 case VEC_WIDEN_MULT_ODD_EXPR
:
4085 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4086 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4087 || !types_compatible_p (rhs1_type
, rhs2_type
)
4088 || (GET_MODE_SIZE (element_mode (lhs_type
))
4089 != 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4091 error ("type mismatch in vector widening multiplication");
4092 debug_generic_expr (lhs_type
);
4093 debug_generic_expr (rhs1_type
);
4094 debug_generic_expr (rhs2_type
);
4100 case VEC_PACK_TRUNC_EXPR
:
4101 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4102 vector boolean types. */
4103 if (VECTOR_BOOLEAN_TYPE_P (lhs_type
)
4104 && VECTOR_BOOLEAN_TYPE_P (rhs1_type
)
4105 && types_compatible_p (rhs1_type
, rhs2_type
)
4106 && (TYPE_VECTOR_SUBPARTS (lhs_type
)
4107 == 2 * TYPE_VECTOR_SUBPARTS (rhs1_type
)))
4111 case VEC_PACK_SAT_EXPR
:
4112 case VEC_PACK_FIX_TRUNC_EXPR
:
4114 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4115 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4116 || !((rhs_code
== VEC_PACK_FIX_TRUNC_EXPR
4117 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
))
4118 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
)))
4119 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
4120 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))))
4121 || !types_compatible_p (rhs1_type
, rhs2_type
)
4122 || (GET_MODE_SIZE (element_mode (rhs1_type
))
4123 != 2 * GET_MODE_SIZE (element_mode (lhs_type
))))
4125 error ("type mismatch in vector pack expression");
4126 debug_generic_expr (lhs_type
);
4127 debug_generic_expr (rhs1_type
);
4128 debug_generic_expr (rhs2_type
);
4136 case MULT_HIGHPART_EXPR
:
4137 case TRUNC_DIV_EXPR
:
4139 case FLOOR_DIV_EXPR
:
4140 case ROUND_DIV_EXPR
:
4141 case TRUNC_MOD_EXPR
:
4143 case FLOOR_MOD_EXPR
:
4144 case ROUND_MOD_EXPR
:
4146 case EXACT_DIV_EXPR
:
4152 /* Continue with generic binary expression handling. */
4159 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4160 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4162 error ("type mismatch in binary expression");
4163 debug_generic_stmt (lhs_type
);
4164 debug_generic_stmt (rhs1_type
);
4165 debug_generic_stmt (rhs2_type
);
4172 /* Verify a gimple assignment statement STMT with a ternary rhs.
4173 Returns true if anything is wrong. */
4176 verify_gimple_assign_ternary (gassign
*stmt
)
4178 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4179 tree lhs
= gimple_assign_lhs (stmt
);
4180 tree lhs_type
= TREE_TYPE (lhs
);
4181 tree rhs1
= gimple_assign_rhs1 (stmt
);
4182 tree rhs1_type
= TREE_TYPE (rhs1
);
4183 tree rhs2
= gimple_assign_rhs2 (stmt
);
4184 tree rhs2_type
= TREE_TYPE (rhs2
);
4185 tree rhs3
= gimple_assign_rhs3 (stmt
);
4186 tree rhs3_type
= TREE_TYPE (rhs3
);
4188 if (!is_gimple_reg (lhs
))
4190 error ("non-register as LHS of ternary operation");
4194 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
4195 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
4196 || !is_gimple_val (rhs2
)
4197 || !is_gimple_val (rhs3
))
4199 error ("invalid operands in ternary operation");
4203 /* First handle operations that involve different types. */
4206 case WIDEN_MULT_PLUS_EXPR
:
4207 case WIDEN_MULT_MINUS_EXPR
:
4208 if ((!INTEGRAL_TYPE_P (rhs1_type
)
4209 && !FIXED_POINT_TYPE_P (rhs1_type
))
4210 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
4211 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4212 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
4213 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
4215 error ("type mismatch in widening multiply-accumulate expression");
4216 debug_generic_expr (lhs_type
);
4217 debug_generic_expr (rhs1_type
);
4218 debug_generic_expr (rhs2_type
);
4219 debug_generic_expr (rhs3_type
);
4225 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4226 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
4227 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4229 error ("type mismatch in fused multiply-add expression");
4230 debug_generic_expr (lhs_type
);
4231 debug_generic_expr (rhs1_type
);
4232 debug_generic_expr (rhs2_type
);
4233 debug_generic_expr (rhs3_type
);
4239 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type
)
4240 || TYPE_VECTOR_SUBPARTS (rhs1_type
)
4241 != TYPE_VECTOR_SUBPARTS (lhs_type
))
4243 error ("the first argument of a VEC_COND_EXPR must be of a "
4244 "boolean vector type of the same number of elements "
4246 debug_generic_expr (lhs_type
);
4247 debug_generic_expr (rhs1_type
);
4252 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
4253 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4255 error ("type mismatch in conditional expression");
4256 debug_generic_expr (lhs_type
);
4257 debug_generic_expr (rhs2_type
);
4258 debug_generic_expr (rhs3_type
);
4264 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4265 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4267 error ("type mismatch in vector permute expression");
4268 debug_generic_expr (lhs_type
);
4269 debug_generic_expr (rhs1_type
);
4270 debug_generic_expr (rhs2_type
);
4271 debug_generic_expr (rhs3_type
);
4275 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4276 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4277 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4279 error ("vector types expected in vector permute expression");
4280 debug_generic_expr (lhs_type
);
4281 debug_generic_expr (rhs1_type
);
4282 debug_generic_expr (rhs2_type
);
4283 debug_generic_expr (rhs3_type
);
4287 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
4288 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
4289 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
4290 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
4291 != TYPE_VECTOR_SUBPARTS (lhs_type
))
4293 error ("vectors with different element number found "
4294 "in vector permute expression");
4295 debug_generic_expr (lhs_type
);
4296 debug_generic_expr (rhs1_type
);
4297 debug_generic_expr (rhs2_type
);
4298 debug_generic_expr (rhs3_type
);
4302 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
4303 || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (rhs3_type
)))
4304 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs1_type
))))
4306 error ("invalid mask type in vector permute expression");
4307 debug_generic_expr (lhs_type
);
4308 debug_generic_expr (rhs1_type
);
4309 debug_generic_expr (rhs2_type
);
4310 debug_generic_expr (rhs3_type
);
4317 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
)
4318 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4319 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
)))
4320 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type
))))
4322 error ("type mismatch in sad expression");
4323 debug_generic_expr (lhs_type
);
4324 debug_generic_expr (rhs1_type
);
4325 debug_generic_expr (rhs2_type
);
4326 debug_generic_expr (rhs3_type
);
4330 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4331 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4332 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4334 error ("vector types expected in sad expression");
4335 debug_generic_expr (lhs_type
);
4336 debug_generic_expr (rhs1_type
);
4337 debug_generic_expr (rhs2_type
);
4338 debug_generic_expr (rhs3_type
);
4344 case BIT_INSERT_EXPR
:
4345 if (! useless_type_conversion_p (lhs_type
, rhs1_type
))
4347 error ("type mismatch in BIT_INSERT_EXPR");
4348 debug_generic_expr (lhs_type
);
4349 debug_generic_expr (rhs1_type
);
4352 if (! ((INTEGRAL_TYPE_P (rhs1_type
)
4353 && INTEGRAL_TYPE_P (rhs2_type
))
4354 || (VECTOR_TYPE_P (rhs1_type
)
4355 && types_compatible_p (TREE_TYPE (rhs1_type
), rhs2_type
))))
4357 error ("not allowed type combination in BIT_INSERT_EXPR");
4358 debug_generic_expr (rhs1_type
);
4359 debug_generic_expr (rhs2_type
);
4362 if (! tree_fits_uhwi_p (rhs3
)
4363 || ! types_compatible_p (bitsizetype
, TREE_TYPE (rhs3
))
4364 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type
)))
4366 error ("invalid position or size in BIT_INSERT_EXPR");
4369 if (INTEGRAL_TYPE_P (rhs1_type
))
4371 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4372 if (bitpos
>= TYPE_PRECISION (rhs1_type
)
4373 || (bitpos
+ TYPE_PRECISION (rhs2_type
)
4374 > TYPE_PRECISION (rhs1_type
)))
4376 error ("insertion out of range in BIT_INSERT_EXPR");
4380 else if (VECTOR_TYPE_P (rhs1_type
))
4382 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4383 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (TYPE_SIZE (rhs2_type
));
4384 if (bitpos
% bitsize
!= 0)
4386 error ("vector insertion not at element boundary");
4394 if (((TREE_CODE (rhs1_type
) != VECTOR_TYPE
4395 || TREE_CODE (lhs_type
) != VECTOR_TYPE
)
4396 && ((!INTEGRAL_TYPE_P (rhs1_type
)
4397 && !SCALAR_FLOAT_TYPE_P (rhs1_type
))
4398 || (!INTEGRAL_TYPE_P (lhs_type
)
4399 && !SCALAR_FLOAT_TYPE_P (lhs_type
))))
4400 || !types_compatible_p (rhs1_type
, rhs2_type
)
4401 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4402 || (GET_MODE_SIZE (element_mode (rhs3_type
))
4403 < 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4405 error ("type mismatch in dot product reduction");
4406 debug_generic_expr (lhs_type
);
4407 debug_generic_expr (rhs1_type
);
4408 debug_generic_expr (rhs2_type
);
4414 case REALIGN_LOAD_EXPR
:
4424 /* Verify a gimple assignment statement STMT with a single rhs.
4425 Returns true if anything is wrong. */
4428 verify_gimple_assign_single (gassign
*stmt
)
4430 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4431 tree lhs
= gimple_assign_lhs (stmt
);
4432 tree lhs_type
= TREE_TYPE (lhs
);
4433 tree rhs1
= gimple_assign_rhs1 (stmt
);
4434 tree rhs1_type
= TREE_TYPE (rhs1
);
4437 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4439 error ("non-trivial conversion at assignment");
4440 debug_generic_expr (lhs_type
);
4441 debug_generic_expr (rhs1_type
);
4445 if (gimple_clobber_p (stmt
)
4446 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4448 error ("non-decl/MEM_REF LHS in clobber statement");
4449 debug_generic_expr (lhs
);
4453 if (handled_component_p (lhs
)
4454 || TREE_CODE (lhs
) == MEM_REF
4455 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4456 res
|= verify_types_in_gimple_reference (lhs
, true);
4458 /* Special codes we cannot handle via their class. */
4463 tree op
= TREE_OPERAND (rhs1
, 0);
4464 if (!is_gimple_addressable (op
))
4466 error ("invalid operand in unary expression");
4470 /* Technically there is no longer a need for matching types, but
4471 gimple hygiene asks for this check. In LTO we can end up
4472 combining incompatible units and thus end up with addresses
4473 of globals that change their type to a common one. */
4475 && !types_compatible_p (TREE_TYPE (op
),
4476 TREE_TYPE (TREE_TYPE (rhs1
)))
4477 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4480 error ("type mismatch in address expression");
4481 debug_generic_stmt (TREE_TYPE (rhs1
));
4482 debug_generic_stmt (TREE_TYPE (op
));
4486 return verify_types_in_gimple_reference (op
, true);
4491 error ("INDIRECT_REF in gimple IL");
4497 case ARRAY_RANGE_REF
:
4498 case VIEW_CONVERT_EXPR
:
4501 case TARGET_MEM_REF
:
4503 if (!is_gimple_reg (lhs
)
4504 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4506 error ("invalid rhs for gimple memory store");
4507 debug_generic_stmt (lhs
);
4508 debug_generic_stmt (rhs1
);
4511 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4523 /* tcc_declaration */
4528 if (!is_gimple_reg (lhs
)
4529 && !is_gimple_reg (rhs1
)
4530 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4532 error ("invalid rhs for gimple memory store");
4533 debug_generic_stmt (lhs
);
4534 debug_generic_stmt (rhs1
);
4540 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
4543 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4545 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4547 /* For vector CONSTRUCTORs we require that either it is empty
4548 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4549 (then the element count must be correct to cover the whole
4550 outer vector and index must be NULL on all elements, or it is
4551 a CONSTRUCTOR of scalar elements, where we as an exception allow
4552 smaller number of elements (assuming zero filling) and
4553 consecutive indexes as compared to NULL indexes (such
4554 CONSTRUCTORs can appear in the IL from FEs). */
4555 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4557 if (elt_t
== NULL_TREE
)
4559 elt_t
= TREE_TYPE (elt_v
);
4560 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4562 tree elt_t
= TREE_TYPE (elt_v
);
4563 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4566 error ("incorrect type of vector CONSTRUCTOR"
4568 debug_generic_stmt (rhs1
);
4571 else if (CONSTRUCTOR_NELTS (rhs1
)
4572 * TYPE_VECTOR_SUBPARTS (elt_t
)
4573 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4575 error ("incorrect number of vector CONSTRUCTOR"
4577 debug_generic_stmt (rhs1
);
4581 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4584 error ("incorrect type of vector CONSTRUCTOR elements");
4585 debug_generic_stmt (rhs1
);
4588 else if (CONSTRUCTOR_NELTS (rhs1
)
4589 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4591 error ("incorrect number of vector CONSTRUCTOR elements");
4592 debug_generic_stmt (rhs1
);
4596 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4598 error ("incorrect type of vector CONSTRUCTOR elements");
4599 debug_generic_stmt (rhs1
);
4602 if (elt_i
!= NULL_TREE
4603 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4604 || TREE_CODE (elt_i
) != INTEGER_CST
4605 || compare_tree_int (elt_i
, i
) != 0))
4607 error ("vector CONSTRUCTOR with non-NULL element index");
4608 debug_generic_stmt (rhs1
);
4611 if (!is_gimple_val (elt_v
))
4613 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4614 debug_generic_stmt (rhs1
);
4619 else if (CONSTRUCTOR_NELTS (rhs1
) != 0)
4621 error ("non-vector CONSTRUCTOR with elements");
4622 debug_generic_stmt (rhs1
);
4628 case WITH_SIZE_EXPR
:
4638 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4639 is a problem, otherwise false. */
4642 verify_gimple_assign (gassign
*stmt
)
4644 switch (gimple_assign_rhs_class (stmt
))
4646 case GIMPLE_SINGLE_RHS
:
4647 return verify_gimple_assign_single (stmt
);
4649 case GIMPLE_UNARY_RHS
:
4650 return verify_gimple_assign_unary (stmt
);
4652 case GIMPLE_BINARY_RHS
:
4653 return verify_gimple_assign_binary (stmt
);
4655 case GIMPLE_TERNARY_RHS
:
4656 return verify_gimple_assign_ternary (stmt
);
4663 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4664 is a problem, otherwise false. */
4667 verify_gimple_return (greturn
*stmt
)
4669 tree op
= gimple_return_retval (stmt
);
4670 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4672 /* We cannot test for present return values as we do not fix up missing
4673 return values from the original source. */
4677 if (!is_gimple_val (op
)
4678 && TREE_CODE (op
) != RESULT_DECL
)
4680 error ("invalid operand in return statement");
4681 debug_generic_stmt (op
);
4685 if ((TREE_CODE (op
) == RESULT_DECL
4686 && DECL_BY_REFERENCE (op
))
4687 || (TREE_CODE (op
) == SSA_NAME
4688 && SSA_NAME_VAR (op
)
4689 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4690 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4691 op
= TREE_TYPE (op
);
4693 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4695 error ("invalid conversion in return statement");
4696 debug_generic_stmt (restype
);
4697 debug_generic_stmt (TREE_TYPE (op
));
4705 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4706 is a problem, otherwise false. */
4709 verify_gimple_goto (ggoto
*stmt
)
4711 tree dest
= gimple_goto_dest (stmt
);
4713 /* ??? We have two canonical forms of direct goto destinations, a
4714 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4715 if (TREE_CODE (dest
) != LABEL_DECL
4716 && (!is_gimple_val (dest
)
4717 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4719 error ("goto destination is neither a label nor a pointer");
4726 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4727 is a problem, otherwise false. */
4730 verify_gimple_switch (gswitch
*stmt
)
4733 tree elt
, prev_upper_bound
= NULL_TREE
;
4734 tree index_type
, elt_type
= NULL_TREE
;
4736 if (!is_gimple_val (gimple_switch_index (stmt
)))
4738 error ("invalid operand to switch statement");
4739 debug_generic_stmt (gimple_switch_index (stmt
));
4743 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4744 if (! INTEGRAL_TYPE_P (index_type
))
4746 error ("non-integral type switch statement");
4747 debug_generic_expr (index_type
);
4751 elt
= gimple_switch_label (stmt
, 0);
4752 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4754 error ("invalid default case label in switch statement");
4755 debug_generic_expr (elt
);
4759 n
= gimple_switch_num_labels (stmt
);
4760 for (i
= 1; i
< n
; i
++)
4762 elt
= gimple_switch_label (stmt
, i
);
4764 if (! CASE_LOW (elt
))
4766 error ("invalid case label in switch statement");
4767 debug_generic_expr (elt
);
4771 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4773 error ("invalid case range in switch statement");
4774 debug_generic_expr (elt
);
4780 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4781 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4783 error ("type mismatch for case label in switch statement");
4784 debug_generic_expr (elt
);
4790 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4791 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4793 error ("type precision mismatch in switch statement");
4798 if (prev_upper_bound
)
4800 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4802 error ("case labels not sorted in switch statement");
4807 prev_upper_bound
= CASE_HIGH (elt
);
4808 if (! prev_upper_bound
)
4809 prev_upper_bound
= CASE_LOW (elt
);
4815 /* Verify a gimple debug statement STMT.
4816 Returns true if anything is wrong. */
4819 verify_gimple_debug (gimple
*stmt ATTRIBUTE_UNUSED
)
4821 /* There isn't much that could be wrong in a gimple debug stmt. A
4822 gimple debug bind stmt, for example, maps a tree, that's usually
4823 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4824 component or member of an aggregate type, to another tree, that
4825 can be an arbitrary expression. These stmts expand into debug
4826 insns, and are converted to debug notes by var-tracking.c. */
4830 /* Verify a gimple label statement STMT.
4831 Returns true if anything is wrong. */
4834 verify_gimple_label (glabel
*stmt
)
4836 tree decl
= gimple_label_label (stmt
);
4840 if (TREE_CODE (decl
) != LABEL_DECL
)
4842 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4843 && DECL_CONTEXT (decl
) != current_function_decl
)
4845 error ("label's context is not the current function decl");
4849 uid
= LABEL_DECL_UID (decl
);
4852 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4854 error ("incorrect entry in label_to_block_map");
4858 uid
= EH_LANDING_PAD_NR (decl
);
4861 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4862 if (decl
!= lp
->post_landing_pad
)
4864 error ("incorrect setting of landing pad number");
4872 /* Verify a gimple cond statement STMT.
4873 Returns true if anything is wrong. */
4876 verify_gimple_cond (gcond
*stmt
)
4878 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4880 error ("invalid comparison code in gimple cond");
4883 if (!(!gimple_cond_true_label (stmt
)
4884 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4885 || !(!gimple_cond_false_label (stmt
)
4886 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4888 error ("invalid labels in gimple cond");
4892 return verify_gimple_comparison (boolean_type_node
,
4893 gimple_cond_lhs (stmt
),
4894 gimple_cond_rhs (stmt
),
4895 gimple_cond_code (stmt
));
4898 /* Verify the GIMPLE statement STMT. Returns true if there is an
4899 error, otherwise false. */
4902 verify_gimple_stmt (gimple
*stmt
)
4904 switch (gimple_code (stmt
))
4907 return verify_gimple_assign (as_a
<gassign
*> (stmt
));
4910 return verify_gimple_label (as_a
<glabel
*> (stmt
));
4913 return verify_gimple_call (as_a
<gcall
*> (stmt
));
4916 return verify_gimple_cond (as_a
<gcond
*> (stmt
));
4919 return verify_gimple_goto (as_a
<ggoto
*> (stmt
));
4922 return verify_gimple_switch (as_a
<gswitch
*> (stmt
));
4925 return verify_gimple_return (as_a
<greturn
*> (stmt
));
4930 case GIMPLE_TRANSACTION
:
4931 return verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
4933 /* Tuples that do not have tree operands. */
4935 case GIMPLE_PREDICT
:
4937 case GIMPLE_EH_DISPATCH
:
4938 case GIMPLE_EH_MUST_NOT_THROW
:
4942 /* OpenMP directives are validated by the FE and never operated
4943 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4944 non-gimple expressions when the main index variable has had
4945 its address taken. This does not affect the loop itself
4946 because the header of an GIMPLE_OMP_FOR is merely used to determine
4947 how to setup the parallel iteration. */
4951 return verify_gimple_debug (stmt
);
4958 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4959 and false otherwise. */
4962 verify_gimple_phi (gimple
*phi
)
4966 tree phi_result
= gimple_phi_result (phi
);
4971 error ("invalid PHI result");
4975 virtual_p
= virtual_operand_p (phi_result
);
4976 if (TREE_CODE (phi_result
) != SSA_NAME
4978 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4980 error ("invalid PHI result");
4984 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4986 tree t
= gimple_phi_arg_def (phi
, i
);
4990 error ("missing PHI def");
4994 /* Addressable variables do have SSA_NAMEs but they
4995 are not considered gimple values. */
4996 else if ((TREE_CODE (t
) == SSA_NAME
4997 && virtual_p
!= virtual_operand_p (t
))
4999 && (TREE_CODE (t
) != SSA_NAME
5000 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
5002 && !is_gimple_val (t
)))
5004 error ("invalid PHI argument");
5005 debug_generic_expr (t
);
5008 #ifdef ENABLE_TYPES_CHECKING
5009 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
5011 error ("incompatible types in PHI argument %u", i
);
5012 debug_generic_stmt (TREE_TYPE (phi_result
));
5013 debug_generic_stmt (TREE_TYPE (t
));
5022 /* Verify the GIMPLE statements inside the sequence STMTS. */
5025 verify_gimple_in_seq_2 (gimple_seq stmts
)
5027 gimple_stmt_iterator ittr
;
5030 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
5032 gimple
*stmt
= gsi_stmt (ittr
);
5034 switch (gimple_code (stmt
))
5037 err
|= verify_gimple_in_seq_2 (
5038 gimple_bind_body (as_a
<gbind
*> (stmt
)));
5042 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
5043 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
5046 case GIMPLE_EH_FILTER
:
5047 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
5050 case GIMPLE_EH_ELSE
:
5052 geh_else
*eh_else
= as_a
<geh_else
*> (stmt
);
5053 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else
));
5054 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else
));
5059 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (
5060 as_a
<gcatch
*> (stmt
)));
5063 case GIMPLE_TRANSACTION
:
5064 err
|= verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
5069 bool err2
= verify_gimple_stmt (stmt
);
5071 debug_gimple_stmt (stmt
);
5080 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5081 is a problem, otherwise false. */
5084 verify_gimple_transaction (gtransaction
*stmt
)
5088 lab
= gimple_transaction_label_norm (stmt
);
5089 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5091 lab
= gimple_transaction_label_uninst (stmt
);
5092 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5094 lab
= gimple_transaction_label_over (stmt
);
5095 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5098 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
5102 /* Verify the GIMPLE statements inside the statement list STMTS. */
5105 verify_gimple_in_seq (gimple_seq stmts
)
5107 timevar_push (TV_TREE_STMT_VERIFY
);
5108 if (verify_gimple_in_seq_2 (stmts
))
5109 internal_error ("verify_gimple failed");
5110 timevar_pop (TV_TREE_STMT_VERIFY
);
5113 /* Return true when the T can be shared. */
5116 tree_node_can_be_shared (tree t
)
5118 if (IS_TYPE_OR_DECL_P (t
)
5119 || is_gimple_min_invariant (t
)
5120 || TREE_CODE (t
) == SSA_NAME
5121 || t
== error_mark_node
5122 || TREE_CODE (t
) == IDENTIFIER_NODE
)
5125 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
5134 /* Called via walk_tree. Verify tree sharing. */
5137 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
5139 hash_set
<void *> *visited
= (hash_set
<void *> *) data
;
5141 if (tree_node_can_be_shared (*tp
))
5143 *walk_subtrees
= false;
5147 if (visited
->add (*tp
))
5153 /* Called via walk_gimple_stmt. Verify tree sharing. */
5156 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
5158 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5159 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
5162 static bool eh_error_found
;
5164 verify_eh_throw_stmt_node (gimple
*const &stmt
, const int &,
5165 hash_set
<gimple
*> *visited
)
5167 if (!visited
->contains (stmt
))
5169 error ("dead STMT in EH table");
5170 debug_gimple_stmt (stmt
);
5171 eh_error_found
= true;
5176 /* Verify if the location LOCs block is in BLOCKS. */
5179 verify_location (hash_set
<tree
> *blocks
, location_t loc
)
5181 tree block
= LOCATION_BLOCK (loc
);
5182 if (block
!= NULL_TREE
5183 && !blocks
->contains (block
))
5185 error ("location references block not in block tree");
5188 if (block
!= NULL_TREE
)
5189 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
5193 /* Called via walk_tree. Verify that expressions have no blocks. */
5196 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
5200 *walk_subtrees
= false;
5204 location_t loc
= EXPR_LOCATION (*tp
);
5205 if (LOCATION_BLOCK (loc
) != NULL
)
5211 /* Called via walk_tree. Verify locations of expressions. */
5214 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
5216 hash_set
<tree
> *blocks
= (hash_set
<tree
> *) data
;
5218 if (VAR_P (*tp
) && DECL_HAS_DEBUG_EXPR_P (*tp
))
5220 tree t
= DECL_DEBUG_EXPR (*tp
);
5221 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
5226 || TREE_CODE (*tp
) == PARM_DECL
5227 || TREE_CODE (*tp
) == RESULT_DECL
)
5228 && DECL_HAS_VALUE_EXPR_P (*tp
))
5230 tree t
= DECL_VALUE_EXPR (*tp
);
5231 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
5238 *walk_subtrees
= false;
5242 location_t loc
= EXPR_LOCATION (*tp
);
5243 if (verify_location (blocks
, loc
))
5249 /* Called via walk_gimple_op. Verify locations of expressions. */
5252 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
5254 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5255 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
5258 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5261 collect_subblocks (hash_set
<tree
> *blocks
, tree block
)
5264 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
5267 collect_subblocks (blocks
, t
);
5271 /* Verify the GIMPLE statements in the CFG of FN. */
5274 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
)
5279 timevar_push (TV_TREE_STMT_VERIFY
);
5280 hash_set
<void *> visited
;
5281 hash_set
<gimple
*> visited_stmts
;
5283 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5284 hash_set
<tree
> blocks
;
5285 if (DECL_INITIAL (fn
->decl
))
5287 blocks
.add (DECL_INITIAL (fn
->decl
));
5288 collect_subblocks (&blocks
, DECL_INITIAL (fn
->decl
));
5291 FOR_EACH_BB_FN (bb
, fn
)
5293 gimple_stmt_iterator gsi
;
5295 for (gphi_iterator gpi
= gsi_start_phis (bb
);
5299 gphi
*phi
= gpi
.phi ();
5303 visited_stmts
.add (phi
);
5305 if (gimple_bb (phi
) != bb
)
5307 error ("gimple_bb (phi) is set to a wrong basic block");
5311 err2
|= verify_gimple_phi (phi
);
5313 /* Only PHI arguments have locations. */
5314 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
5316 error ("PHI node with location");
5320 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5322 tree arg
= gimple_phi_arg_def (phi
, i
);
5323 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
5327 error ("incorrect sharing of tree nodes");
5328 debug_generic_expr (addr
);
5331 location_t loc
= gimple_phi_arg_location (phi
, i
);
5332 if (virtual_operand_p (gimple_phi_result (phi
))
5333 && loc
!= UNKNOWN_LOCATION
)
5335 error ("virtual PHI with argument locations");
5338 addr
= walk_tree (&arg
, verify_expr_location_1
, &blocks
, NULL
);
5341 debug_generic_expr (addr
);
5344 err2
|= verify_location (&blocks
, loc
);
5348 debug_gimple_stmt (phi
);
5352 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5354 gimple
*stmt
= gsi_stmt (gsi
);
5356 struct walk_stmt_info wi
;
5360 visited_stmts
.add (stmt
);
5362 if (gimple_bb (stmt
) != bb
)
5364 error ("gimple_bb (stmt) is set to a wrong basic block");
5368 err2
|= verify_gimple_stmt (stmt
);
5369 err2
|= verify_location (&blocks
, gimple_location (stmt
));
5371 memset (&wi
, 0, sizeof (wi
));
5372 wi
.info
= (void *) &visited
;
5373 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
5376 error ("incorrect sharing of tree nodes");
5377 debug_generic_expr (addr
);
5381 memset (&wi
, 0, sizeof (wi
));
5382 wi
.info
= (void *) &blocks
;
5383 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
5386 debug_generic_expr (addr
);
5390 /* ??? Instead of not checking these stmts at all the walker
5391 should know its context via wi. */
5392 if (!is_gimple_debug (stmt
)
5393 && !is_gimple_omp (stmt
))
5395 memset (&wi
, 0, sizeof (wi
));
5396 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
5399 debug_generic_expr (addr
);
5400 inform (gimple_location (stmt
), "in statement");
5405 /* If the statement is marked as part of an EH region, then it is
5406 expected that the statement could throw. Verify that when we
5407 have optimizations that simplify statements such that we prove
5408 that they cannot throw, that we update other data structures
5410 lp_nr
= lookup_stmt_eh_lp (stmt
);
5413 if (!stmt_could_throw_p (stmt
))
5417 error ("statement marked for throw, but doesn%'t");
5421 else if (!gsi_one_before_end_p (gsi
))
5423 error ("statement marked for throw in middle of block");
5429 debug_gimple_stmt (stmt
);
5434 eh_error_found
= false;
5435 hash_map
<gimple
*, int> *eh_table
= get_eh_throw_stmt_table (cfun
);
5437 eh_table
->traverse
<hash_set
<gimple
*> *, verify_eh_throw_stmt_node
>
5440 if (err
|| eh_error_found
)
5441 internal_error ("verify_gimple failed");
5443 verify_histograms ();
5444 timevar_pop (TV_TREE_STMT_VERIFY
);
5448 /* Verifies that the flow information is OK. */
5451 gimple_verify_flow_info (void)
5455 gimple_stmt_iterator gsi
;
5460 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5461 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5463 error ("ENTRY_BLOCK has IL associated with it");
5467 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5468 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5470 error ("EXIT_BLOCK has IL associated with it");
5474 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5475 if (e
->flags
& EDGE_FALLTHRU
)
5477 error ("fallthru to exit from bb %d", e
->src
->index
);
5481 FOR_EACH_BB_FN (bb
, cfun
)
5483 bool found_ctrl_stmt
= false;
5487 /* Skip labels on the start of basic block. */
5488 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5491 gimple
*prev_stmt
= stmt
;
5493 stmt
= gsi_stmt (gsi
);
5495 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5498 label
= gimple_label_label (as_a
<glabel
*> (stmt
));
5499 if (prev_stmt
&& DECL_NONLOCAL (label
))
5501 error ("nonlocal label ");
5502 print_generic_expr (stderr
, label
);
5503 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5508 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5510 error ("EH landing pad label ");
5511 print_generic_expr (stderr
, label
);
5512 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5517 if (label_to_block (label
) != bb
)
5520 print_generic_expr (stderr
, label
);
5521 fprintf (stderr
, " to block does not match in bb %d",
5526 if (decl_function_context (label
) != current_function_decl
)
5529 print_generic_expr (stderr
, label
);
5530 fprintf (stderr
, " has incorrect context in bb %d",
5536 /* Verify that body of basic block BB is free of control flow. */
5537 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5539 gimple
*stmt
= gsi_stmt (gsi
);
5541 if (found_ctrl_stmt
)
5543 error ("control flow in the middle of basic block %d",
5548 if (stmt_ends_bb_p (stmt
))
5549 found_ctrl_stmt
= true;
5551 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
5554 print_generic_expr (stderr
, gimple_label_label (label_stmt
));
5555 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
5560 gsi
= gsi_last_bb (bb
);
5561 if (gsi_end_p (gsi
))
5564 stmt
= gsi_stmt (gsi
);
5566 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5569 err
|= verify_eh_edges (stmt
);
5571 if (is_ctrl_stmt (stmt
))
5573 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5574 if (e
->flags
& EDGE_FALLTHRU
)
5576 error ("fallthru edge after a control statement in bb %d",
5582 if (gimple_code (stmt
) != GIMPLE_COND
)
5584 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5585 after anything else but if statement. */
5586 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5587 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5589 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5595 switch (gimple_code (stmt
))
5602 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5606 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5607 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5608 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5609 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5610 || EDGE_COUNT (bb
->succs
) >= 3)
5612 error ("wrong outgoing edge flags at end of bb %d",
5620 if (simple_goto_p (stmt
))
5622 error ("explicit goto at end of bb %d", bb
->index
);
5627 /* FIXME. We should double check that the labels in the
5628 destination blocks have their address taken. */
5629 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5630 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5631 | EDGE_FALSE_VALUE
))
5632 || !(e
->flags
& EDGE_ABNORMAL
))
5634 error ("wrong outgoing edge flags at end of bb %d",
5642 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5646 if (!single_succ_p (bb
)
5647 || (single_succ_edge (bb
)->flags
5648 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5649 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5651 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5654 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5656 error ("return edge does not point to exit in bb %d",
5664 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5669 n
= gimple_switch_num_labels (switch_stmt
);
5671 /* Mark all the destination basic blocks. */
5672 for (i
= 0; i
< n
; ++i
)
5674 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5675 basic_block label_bb
= label_to_block (lab
);
5676 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5677 label_bb
->aux
= (void *)1;
5680 /* Verify that the case labels are sorted. */
5681 prev
= gimple_switch_label (switch_stmt
, 0);
5682 for (i
= 1; i
< n
; ++i
)
5684 tree c
= gimple_switch_label (switch_stmt
, i
);
5687 error ("found default case not at the start of "
5693 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5695 error ("case labels not sorted: ");
5696 print_generic_expr (stderr
, prev
);
5697 fprintf (stderr
," is greater than ");
5698 print_generic_expr (stderr
, c
);
5699 fprintf (stderr
," but comes before it.\n");
5704 /* VRP will remove the default case if it can prove it will
5705 never be executed. So do not verify there always exists
5706 a default case here. */
5708 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5712 error ("extra outgoing edge %d->%d",
5713 bb
->index
, e
->dest
->index
);
5717 e
->dest
->aux
= (void *)2;
5718 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5719 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5721 error ("wrong outgoing edge flags at end of bb %d",
5727 /* Check that we have all of them. */
5728 for (i
= 0; i
< n
; ++i
)
5730 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5731 basic_block label_bb
= label_to_block (lab
);
5733 if (label_bb
->aux
!= (void *)2)
5735 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5740 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5741 e
->dest
->aux
= (void *)0;
5745 case GIMPLE_EH_DISPATCH
:
5746 err
|= verify_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
));
5754 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5755 verify_dominators (CDI_DOMINATORS
);
5761 /* Updates phi nodes after creating a forwarder block joined
5762 by edge FALLTHRU. */
5765 gimple_make_forwarder_block (edge fallthru
)
5769 basic_block dummy
, bb
;
5773 dummy
= fallthru
->src
;
5774 bb
= fallthru
->dest
;
5776 if (single_pred_p (bb
))
5779 /* If we redirected a branch we must create new PHI nodes at the
5781 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5783 gphi
*phi
, *new_phi
;
5786 var
= gimple_phi_result (phi
);
5787 new_phi
= create_phi_node (var
, bb
);
5788 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5789 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5793 /* Add the arguments we have stored on edges. */
5794 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5799 flush_pending_stmts (e
);
5804 /* Return a non-special label in the head of basic block BLOCK.
5805 Create one if it doesn't exist. */
5808 gimple_block_label (basic_block bb
)
5810 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5815 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5817 stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
5820 label
= gimple_label_label (stmt
);
5821 if (!DECL_NONLOCAL (label
))
5824 gsi_move_before (&i
, &s
);
5829 label
= create_artificial_label (UNKNOWN_LOCATION
);
5830 stmt
= gimple_build_label (label
);
5831 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5836 /* Attempt to perform edge redirection by replacing a possibly complex
5837 jump instruction by a goto or by removing the jump completely.
5838 This can apply only if all edges now point to the same block. The
5839 parameters and return values are equivalent to
5840 redirect_edge_and_branch. */
5843 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5845 basic_block src
= e
->src
;
5846 gimple_stmt_iterator i
;
5849 /* We can replace or remove a complex jump only when we have exactly
5851 if (EDGE_COUNT (src
->succs
) != 2
5852 /* Verify that all targets will be TARGET. Specifically, the
5853 edge that is not E must also go to TARGET. */
5854 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5857 i
= gsi_last_bb (src
);
5861 stmt
= gsi_stmt (i
);
5863 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5865 gsi_remove (&i
, true);
5866 e
= ssa_redirect_edge (e
, target
);
5867 e
->flags
= EDGE_FALLTHRU
;
5875 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5876 edge representing the redirected branch. */
5879 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5881 basic_block bb
= e
->src
;
5882 gimple_stmt_iterator gsi
;
5886 if (e
->flags
& EDGE_ABNORMAL
)
5889 if (e
->dest
== dest
)
5892 if (e
->flags
& EDGE_EH
)
5893 return redirect_eh_edge (e
, dest
);
5895 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5897 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5902 gsi
= gsi_last_bb (bb
);
5903 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5905 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5908 /* For COND_EXPR, we only need to redirect the edge. */
5912 /* No non-abnormal edges should lead from a non-simple goto, and
5913 simple ones should be represented implicitly. */
5918 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5919 tree label
= gimple_block_label (dest
);
5920 tree cases
= get_cases_for_edge (e
, switch_stmt
);
5922 /* If we have a list of cases associated with E, then use it
5923 as it's a lot faster than walking the entire case vector. */
5926 edge e2
= find_edge (e
->src
, dest
);
5933 CASE_LABEL (cases
) = label
;
5934 cases
= CASE_CHAIN (cases
);
5937 /* If there was already an edge in the CFG, then we need
5938 to move all the cases associated with E to E2. */
5941 tree cases2
= get_cases_for_edge (e2
, switch_stmt
);
5943 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5944 CASE_CHAIN (cases2
) = first
;
5946 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5950 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
5952 for (i
= 0; i
< n
; i
++)
5954 tree elt
= gimple_switch_label (switch_stmt
, i
);
5955 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5956 CASE_LABEL (elt
) = label
;
5964 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5965 int i
, n
= gimple_asm_nlabels (asm_stmt
);
5968 for (i
= 0; i
< n
; ++i
)
5970 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
5971 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5974 label
= gimple_block_label (dest
);
5975 TREE_VALUE (cons
) = label
;
5979 /* If we didn't find any label matching the former edge in the
5980 asm labels, we must be redirecting the fallthrough
5982 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5987 gsi_remove (&gsi
, true);
5988 e
->flags
|= EDGE_FALLTHRU
;
5991 case GIMPLE_OMP_RETURN
:
5992 case GIMPLE_OMP_CONTINUE
:
5993 case GIMPLE_OMP_SECTIONS_SWITCH
:
5994 case GIMPLE_OMP_FOR
:
5995 /* The edges from OMP constructs can be simply redirected. */
5998 case GIMPLE_EH_DISPATCH
:
5999 if (!(e
->flags
& EDGE_FALLTHRU
))
6000 redirect_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
), e
, dest
);
6003 case GIMPLE_TRANSACTION
:
6004 if (e
->flags
& EDGE_TM_ABORT
)
6005 gimple_transaction_set_label_over (as_a
<gtransaction
*> (stmt
),
6006 gimple_block_label (dest
));
6007 else if (e
->flags
& EDGE_TM_UNINSTRUMENTED
)
6008 gimple_transaction_set_label_uninst (as_a
<gtransaction
*> (stmt
),
6009 gimple_block_label (dest
));
6011 gimple_transaction_set_label_norm (as_a
<gtransaction
*> (stmt
),
6012 gimple_block_label (dest
));
6016 /* Otherwise it must be a fallthru edge, and we don't need to
6017 do anything besides redirecting it. */
6018 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
6022 /* Update/insert PHI nodes as necessary. */
6024 /* Now update the edges in the CFG. */
6025 e
= ssa_redirect_edge (e
, dest
);
6030 /* Returns true if it is possible to remove edge E by redirecting
6031 it to the destination of the other edge from E->src. */
6034 gimple_can_remove_branch_p (const_edge e
)
6036 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
6042 /* Simple wrapper, as we can always redirect fallthru edges. */
6045 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
6047 e
= gimple_redirect_edge_and_branch (e
, dest
);
6054 /* Splits basic block BB after statement STMT (but at least after the
6055 labels). If STMT is NULL, BB is split just after the labels. */
6058 gimple_split_block (basic_block bb
, void *stmt
)
6060 gimple_stmt_iterator gsi
;
6061 gimple_stmt_iterator gsi_tgt
;
6067 new_bb
= create_empty_bb (bb
);
6069 /* Redirect the outgoing edges. */
6070 new_bb
->succs
= bb
->succs
;
6072 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
6075 /* Get a stmt iterator pointing to the first stmt to move. */
6076 if (!stmt
|| gimple_code ((gimple
*) stmt
) == GIMPLE_LABEL
)
6077 gsi
= gsi_after_labels (bb
);
6080 gsi
= gsi_for_stmt ((gimple
*) stmt
);
6084 /* Move everything from GSI to the new basic block. */
6085 if (gsi_end_p (gsi
))
6088 /* Split the statement list - avoid re-creating new containers as this
6089 brings ugly quadratic memory consumption in the inliner.
6090 (We are still quadratic since we need to update stmt BB pointers,
6092 gsi_split_seq_before (&gsi
, &list
);
6093 set_bb_seq (new_bb
, list
);
6094 for (gsi_tgt
= gsi_start (list
);
6095 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
6096 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
6102 /* Moves basic block BB after block AFTER. */
6105 gimple_move_block_after (basic_block bb
, basic_block after
)
6107 if (bb
->prev_bb
== after
)
6111 link_block (bb
, after
);
6117 /* Return TRUE if block BB has no executable statements, otherwise return
6121 gimple_empty_block_p (basic_block bb
)
6123 /* BB must have no executable statements. */
6124 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
6127 if (gsi_end_p (gsi
))
6129 if (is_gimple_debug (gsi_stmt (gsi
)))
6130 gsi_next_nondebug (&gsi
);
6131 return gsi_end_p (gsi
);
6135 /* Split a basic block if it ends with a conditional branch and if the
6136 other part of the block is not empty. */
6139 gimple_split_block_before_cond_jump (basic_block bb
)
6141 gimple
*last
, *split_point
;
6142 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
6143 if (gsi_end_p (gsi
))
6145 last
= gsi_stmt (gsi
);
6146 if (gimple_code (last
) != GIMPLE_COND
6147 && gimple_code (last
) != GIMPLE_SWITCH
)
6150 split_point
= gsi_stmt (gsi
);
6151 return split_block (bb
, split_point
)->dest
;
6155 /* Return true if basic_block can be duplicated. */
6158 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
6163 /* Create a duplicate of the basic block BB. NOTE: This does not
6164 preserve SSA form. */
6167 gimple_duplicate_bb (basic_block bb
)
6170 gimple_stmt_iterator gsi_tgt
;
6172 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
6174 /* Copy the PHI nodes. We ignore PHI node arguments here because
6175 the incoming edges have not been setup yet. */
6176 for (gphi_iterator gpi
= gsi_start_phis (bb
);
6182 copy
= create_phi_node (NULL_TREE
, new_bb
);
6183 create_new_def_for (gimple_phi_result (phi
), copy
,
6184 gimple_phi_result_ptr (copy
));
6185 gimple_set_uid (copy
, gimple_uid (phi
));
6188 gsi_tgt
= gsi_start_bb (new_bb
);
6189 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
6193 def_operand_p def_p
;
6194 ssa_op_iter op_iter
;
6196 gimple
*stmt
, *copy
;
6198 stmt
= gsi_stmt (gsi
);
6199 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6202 /* Don't duplicate label debug stmts. */
6203 if (gimple_debug_bind_p (stmt
)
6204 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
6208 /* Create a new copy of STMT and duplicate STMT's virtual
6210 copy
= gimple_copy (stmt
);
6211 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
6213 maybe_duplicate_eh_stmt (copy
, stmt
);
6214 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
6216 /* When copying around a stmt writing into a local non-user
6217 aggregate, make sure it won't share stack slot with other
6219 lhs
= gimple_get_lhs (stmt
);
6220 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
6222 tree base
= get_base_address (lhs
);
6224 && (VAR_P (base
) || TREE_CODE (base
) == RESULT_DECL
)
6225 && DECL_IGNORED_P (base
)
6226 && !TREE_STATIC (base
)
6227 && !DECL_EXTERNAL (base
)
6228 && (!VAR_P (base
) || !DECL_HAS_VALUE_EXPR_P (base
)))
6229 DECL_NONSHAREABLE (base
) = 1;
6232 /* Create new names for all the definitions created by COPY and
6233 add replacement mappings for each new name. */
6234 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
6235 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
6241 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6244 add_phi_args_after_copy_edge (edge e_copy
)
6246 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
6249 gphi
*phi
, *phi_copy
;
6251 gphi_iterator psi
, psi_copy
;
6253 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
6256 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
6258 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
6259 dest
= get_bb_original (e_copy
->dest
);
6261 dest
= e_copy
->dest
;
6263 e
= find_edge (bb
, dest
);
6266 /* During loop unrolling the target of the latch edge is copied.
6267 In this case we are not looking for edge to dest, but to
6268 duplicated block whose original was dest. */
6269 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6271 if ((e
->dest
->flags
& BB_DUPLICATED
)
6272 && get_bb_original (e
->dest
) == dest
)
6276 gcc_assert (e
!= NULL
);
6279 for (psi
= gsi_start_phis (e
->dest
),
6280 psi_copy
= gsi_start_phis (e_copy
->dest
);
6282 gsi_next (&psi
), gsi_next (&psi_copy
))
6285 phi_copy
= psi_copy
.phi ();
6286 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
6287 add_phi_arg (phi_copy
, def
, e_copy
,
6288 gimple_phi_arg_location_from_edge (phi
, e
));
6293 /* Basic block BB_COPY was created by code duplication. Add phi node
6294 arguments for edges going out of BB_COPY. The blocks that were
6295 duplicated have BB_DUPLICATED set. */
6298 add_phi_args_after_copy_bb (basic_block bb_copy
)
6303 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
6305 add_phi_args_after_copy_edge (e_copy
);
6309 /* Blocks in REGION_COPY array of length N_REGION were created by
6310 duplication of basic blocks. Add phi node arguments for edges
6311 going from these blocks. If E_COPY is not NULL, also add
6312 phi node arguments for its destination.*/
6315 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
6320 for (i
= 0; i
< n_region
; i
++)
6321 region_copy
[i
]->flags
|= BB_DUPLICATED
;
6323 for (i
= 0; i
< n_region
; i
++)
6324 add_phi_args_after_copy_bb (region_copy
[i
]);
6326 add_phi_args_after_copy_edge (e_copy
);
6328 for (i
= 0; i
< n_region
; i
++)
6329 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
6332 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6333 important exit edge EXIT. By important we mean that no SSA name defined
6334 inside region is live over the other exit edges of the region. All entry
6335 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6336 to the duplicate of the region. Dominance and loop information is
6337 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6338 UPDATE_DOMINANCE is false then we assume that the caller will update the
6339 dominance information after calling this function. The new basic
6340 blocks are stored to REGION_COPY in the same order as they had in REGION,
6341 provided that REGION_COPY is not NULL.
6342 The function returns false if it is unable to copy the region,
6346 gimple_duplicate_sese_region (edge entry
, edge exit
,
6347 basic_block
*region
, unsigned n_region
,
6348 basic_block
*region_copy
,
6349 bool update_dominance
)
6352 bool free_region_copy
= false, copying_header
= false;
6353 struct loop
*loop
= entry
->dest
->loop_father
;
6355 vec
<basic_block
> doms
= vNULL
;
6357 profile_count total_count
= profile_count::uninitialized ();
6358 profile_count entry_count
= profile_count::uninitialized ();
6360 if (!can_copy_bbs_p (region
, n_region
))
6363 /* Some sanity checking. Note that we do not check for all possible
6364 missuses of the functions. I.e. if you ask to copy something weird,
6365 it will work, but the state of structures probably will not be
6367 for (i
= 0; i
< n_region
; i
++)
6369 /* We do not handle subloops, i.e. all the blocks must belong to the
6371 if (region
[i
]->loop_father
!= loop
)
6374 if (region
[i
] != entry
->dest
6375 && region
[i
] == loop
->header
)
6379 /* In case the function is used for loop header copying (which is the primary
6380 use), ensure that EXIT and its copy will be new latch and entry edges. */
6381 if (loop
->header
== entry
->dest
)
6383 copying_header
= true;
6385 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
6388 for (i
= 0; i
< n_region
; i
++)
6389 if (region
[i
] != exit
->src
6390 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
6394 initialize_original_copy_tables ();
6397 set_loop_copy (loop
, loop_outer (loop
));
6399 set_loop_copy (loop
, loop
);
6403 region_copy
= XNEWVEC (basic_block
, n_region
);
6404 free_region_copy
= true;
6407 /* Record blocks outside the region that are dominated by something
6409 if (update_dominance
)
6412 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6415 if (entry
->dest
->count
.initialized_p ())
6417 total_count
= entry
->dest
->count
;
6418 entry_count
= entry
->count ();
6419 /* Fix up corner cases, to avoid division by zero or creation of negative
6421 if (entry_count
> total_count
)
6422 entry_count
= total_count
;
6425 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
6426 split_edge_bb_loc (entry
), update_dominance
);
6427 if (total_count
.initialized_p () && entry_count
.initialized_p ())
6429 scale_bbs_frequencies_profile_count (region
, n_region
,
6430 total_count
- entry_count
,
6432 scale_bbs_frequencies_profile_count (region_copy
, n_region
, entry_count
,
6438 loop
->header
= exit
->dest
;
6439 loop
->latch
= exit
->src
;
6442 /* Redirect the entry and add the phi node arguments. */
6443 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
6444 gcc_assert (redirected
!= NULL
);
6445 flush_pending_stmts (entry
);
6447 /* Concerning updating of dominators: We must recount dominators
6448 for entry block and its copy. Anything that is outside of the
6449 region, but was dominated by something inside needs recounting as
6451 if (update_dominance
)
6453 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6454 doms
.safe_push (get_bb_original (entry
->dest
));
6455 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6459 /* Add the other PHI node arguments. */
6460 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6462 if (free_region_copy
)
6465 free_original_copy_tables ();
6469 /* Checks if BB is part of the region defined by N_REGION BBS. */
6471 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6475 for (n
= 0; n
< n_region
; n
++)
6483 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6484 are stored to REGION_COPY in the same order in that they appear
6485 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6486 the region, EXIT an exit from it. The condition guarding EXIT
6487 is moved to ENTRY. Returns true if duplication succeeds, false
6513 gimple_duplicate_sese_tail (edge entry
, edge exit
,
6514 basic_block
*region
, unsigned n_region
,
6515 basic_block
*region_copy
)
6518 bool free_region_copy
= false;
6519 struct loop
*loop
= exit
->dest
->loop_father
;
6520 struct loop
*orig_loop
= entry
->dest
->loop_father
;
6521 basic_block switch_bb
, entry_bb
, nentry_bb
;
6522 vec
<basic_block
> doms
;
6523 profile_count total_count
= profile_count::uninitialized (),
6524 exit_count
= profile_count::uninitialized ();
6525 edge exits
[2], nexits
[2], e
;
6526 gimple_stmt_iterator gsi
;
6529 basic_block exit_bb
;
6533 struct loop
*target
, *aloop
, *cloop
;
6535 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6537 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6539 if (!can_copy_bbs_p (region
, n_region
))
6542 initialize_original_copy_tables ();
6543 set_loop_copy (orig_loop
, loop
);
6546 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6548 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6550 cloop
= duplicate_loop (aloop
, target
);
6551 duplicate_subloops (aloop
, cloop
);
6557 region_copy
= XNEWVEC (basic_block
, n_region
);
6558 free_region_copy
= true;
6561 gcc_assert (!need_ssa_update_p (cfun
));
6563 /* Record blocks outside the region that are dominated by something
6565 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6567 total_count
= exit
->src
->count
;
6568 exit_count
= exit
->count ();
6569 /* Fix up corner cases, to avoid division by zero or creation of negative
6571 if (exit_count
> total_count
)
6572 exit_count
= total_count
;
6574 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6575 split_edge_bb_loc (exit
), true);
6576 if (total_count
.initialized_p () && exit_count
.initialized_p ())
6578 scale_bbs_frequencies_profile_count (region
, n_region
,
6579 total_count
- exit_count
,
6581 scale_bbs_frequencies_profile_count (region_copy
, n_region
, exit_count
,
6585 /* Create the switch block, and put the exit condition to it. */
6586 entry_bb
= entry
->dest
;
6587 nentry_bb
= get_bb_copy (entry_bb
);
6588 if (!last_stmt (entry
->src
)
6589 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6590 switch_bb
= entry
->src
;
6592 switch_bb
= split_edge (entry
);
6593 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6595 gsi
= gsi_last_bb (switch_bb
);
6596 cond_stmt
= last_stmt (exit
->src
);
6597 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6598 cond_stmt
= gimple_copy (cond_stmt
);
6600 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6602 sorig
= single_succ_edge (switch_bb
);
6603 sorig
->flags
= exits
[1]->flags
;
6604 sorig
->probability
= exits
[1]->probability
;
6605 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6606 snew
->probability
= exits
[0]->probability
;
6609 /* Register the new edge from SWITCH_BB in loop exit lists. */
6610 rescan_loop_exit (snew
, true, false);
6612 /* Add the PHI node arguments. */
6613 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6615 /* Get rid of now superfluous conditions and associated edges (and phi node
6617 exit_bb
= exit
->dest
;
6619 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6620 PENDING_STMT (e
) = NULL
;
6622 /* The latch of ORIG_LOOP was copied, and so was the backedge
6623 to the original header. We redirect this backedge to EXIT_BB. */
6624 for (i
= 0; i
< n_region
; i
++)
6625 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6627 gcc_assert (single_succ_edge (region_copy
[i
]));
6628 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6629 PENDING_STMT (e
) = NULL
;
6630 for (psi
= gsi_start_phis (exit_bb
);
6635 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6636 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6639 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6640 PENDING_STMT (e
) = NULL
;
6642 /* Anything that is outside of the region, but was dominated by something
6643 inside needs to update dominance info. */
6644 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6646 /* Update the SSA web. */
6647 update_ssa (TODO_update_ssa
);
6649 if (free_region_copy
)
6652 free_original_copy_tables ();
6656 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6657 adding blocks when the dominator traversal reaches EXIT. This
6658 function silently assumes that ENTRY strictly dominates EXIT. */
6661 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6662 vec
<basic_block
> *bbs_p
)
6666 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6668 son
= next_dom_son (CDI_DOMINATORS
, son
))
6670 bbs_p
->safe_push (son
);
6672 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6676 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6677 The duplicates are recorded in VARS_MAP. */
6680 replace_by_duplicate_decl (tree
*tp
, hash_map
<tree
, tree
> *vars_map
,
6683 tree t
= *tp
, new_t
;
6684 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6686 if (DECL_CONTEXT (t
) == to_context
)
6690 tree
&loc
= vars_map
->get_or_insert (t
, &existed
);
6696 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6697 add_local_decl (f
, new_t
);
6701 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6702 new_t
= copy_node (t
);
6704 DECL_CONTEXT (new_t
) = to_context
;
6715 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6716 VARS_MAP maps old ssa names and var_decls to the new ones. */
6719 replace_ssa_name (tree name
, hash_map
<tree
, tree
> *vars_map
,
6724 gcc_assert (!virtual_operand_p (name
));
6726 tree
*loc
= vars_map
->get (name
);
6730 tree decl
= SSA_NAME_VAR (name
);
6733 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name
));
6734 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6735 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6736 decl
, SSA_NAME_DEF_STMT (name
));
6739 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6740 name
, SSA_NAME_DEF_STMT (name
));
6742 /* Now that we've used the def stmt to define new_name, make sure it
6743 doesn't define name anymore. */
6744 SSA_NAME_DEF_STMT (name
) = NULL
;
6746 vars_map
->put (name
, new_name
);
6760 hash_map
<tree
, tree
> *vars_map
;
6761 htab_t new_label_map
;
6762 hash_map
<void *, void *> *eh_map
;
6766 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6767 contained in *TP if it has been ORIG_BLOCK previously and change the
6768 DECL_CONTEXT of every local variable referenced in *TP. */
/* NOTE(review): walk_tree callback; several lines (the `t = *tp'
   binding, braces, return) are elided in this chunk.  */
6771 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6773 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6774 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
/* Re-home the expression's lexical block if it was in ORIG_BLOCK (or if
   no specific origin block was requested).  */
6779 tree block
= TREE_BLOCK (t
);
6780 if (block
== NULL_TREE
)
6782 else if (block
== p
->orig_block
6783 || p
->orig_block
== NULL_TREE
)
6784 TREE_SET_BLOCK (t
, p
->new_block
);
6785 else if (flag_checking
)
/* Checking only: verify BLOCK is nested inside ORIG_BLOCK.  */
6787 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6788 block
= BLOCK_SUPERCONTEXT (block
);
6789 gcc_assert (block
== p
->orig_block
);
6792 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6794 if (TREE_CODE (t
) == SSA_NAME
)
6795 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6796 else if (TREE_CODE (t
) == PARM_DECL
6797 && gimple_in_ssa_p (cfun
))
/* PARM_DECLs were pre-seeded into VARS_MAP by the caller.  */
6798 *tp
= *(p
->vars_map
->get (t
));
6799 else if (TREE_CODE (t
) == LABEL_DECL
)
6801 if (p
->new_label_map
)
6803 struct tree_map in
, *out
;
6805 out
= (struct tree_map
*)
6806 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6811 /* For FORCED_LABELs we can end up with references from other
6812 functions if some SESE regions are outlined. It is UB to
6813 jump in between them, but they could be used just for printing
6814 addresses etc. In that case, DECL_CONTEXT on the label should
6815 be the function containing the glabel stmt with that LABEL_DECL,
6816 rather than whatever function a reference to the label was seen
6818 if (!FORCED_LABEL (t
) && !DECL_NONLOCAL (t
))
6819 DECL_CONTEXT (t
) = p
->to_context
;
6821 else if (p
->remap_decls_p
)
6823 /* Replace T with its duplicate. T should no longer appear in the
6824 parent function, so this looks wasteful; however, it may appear
6825 in referenced_vars, and more importantly, as virtual operands of
6826 statements, and in alias lists of other variables. It would be
6827 quite difficult to expunge it from all those places. ??? It might
6828 suffice to do this for addressable variables. */
6829 if ((VAR_P (t
) && !is_global_var (t
))
6830 || TREE_CODE (t
) == CONST_DECL
)
6831 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6835 else if (TYPE_P (t
))
6841 /* Helper for move_stmt_r. Given an EH region number for the source
6842 function, map that to the duplicate EH region number in the dest. */
6845 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6847 eh_region old_r
, new_r
;
/* Resolve the source region, then look up its duplicate in EH_MAP
   (populated by duplicate_eh_regions) and return the new number.  */
6849 old_r
= get_eh_region_from_number (old_nr
);
6850 new_r
= static_cast<eh_region
> (*p
->eh_map
->get (old_r
));
6852 return new_r
->index
;
6855 /* Similar, but operate on INTEGER_CSTs. */
/* Translate an EH region number wrapped in an INTEGER_CST (OLD_T_NR)
   to the corresponding constant for the destination function.  */
6858 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6862 old_nr
= tree_to_shwi (old_t_nr
);
6863 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6865 return build_int_cst (integer_type_node
, new_nr
);
6868 /* Like move_stmt_op, but for gimple statements.
6870 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6871 contained in the current statement in *GSI_P and change the
6872 DECL_CONTEXT of every local variable referenced in the current
/* NOTE(review): walk_gimple_seq callback; case labels, breaks and the
   return value are partially elided in this chunk.  */
6876 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6877 struct walk_stmt_info
*wi
)
6879 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6880 gimple
*stmt
= gsi_stmt (*gsi_p
);
6881 tree block
= gimple_block (stmt
);
/* Re-home the statement's lexical block, mirroring move_stmt_op.  */
6883 if (block
== p
->orig_block
6884 || (p
->orig_block
== NULL_TREE
6885 && block
!= NULL_TREE
))
6886 gimple_set_block (stmt
, p
->new_block
);
6888 switch (gimple_code (stmt
))
6891 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6893 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6894 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6895 switch (DECL_FUNCTION_CODE (fndecl
))
6897 case BUILT_IN_EH_COPY_VALUES
:
/* Second argument carries the EH region number.  */
6898 r
= gimple_call_arg (stmt
, 1);
6899 r
= move_stmt_eh_region_tree_nr (r
, p
);
6900 gimple_call_set_arg (stmt
, 1, r
);
6903 case BUILT_IN_EH_POINTER
:
6904 case BUILT_IN_EH_FILTER
:
/* First argument carries the EH region number.  */
6905 r
= gimple_call_arg (stmt
, 0);
6906 r
= move_stmt_eh_region_tree_nr (r
, p
);
6907 gimple_call_set_arg (stmt
, 0, r
);
/* GIMPLE_RESX: remap its EH region to the duplicate.  */
6918 gresx
*resx_stmt
= as_a
<gresx
*> (stmt
);
6919 int r
= gimple_resx_region (resx_stmt
);
6920 r
= move_stmt_eh_region_nr (r
, p
);
6921 gimple_resx_set_region (resx_stmt
, r
);
6925 case GIMPLE_EH_DISPATCH
:
6927 geh_dispatch
*eh_dispatch_stmt
= as_a
<geh_dispatch
*> (stmt
);
6928 int r
= gimple_eh_dispatch_region (eh_dispatch_stmt
);
6929 r
= move_stmt_eh_region_nr (r
, p
);
6930 gimple_eh_dispatch_set_region (eh_dispatch_stmt
, r
);
6934 case GIMPLE_OMP_RETURN
:
6935 case GIMPLE_OMP_CONTINUE
:
6940 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6941 so that such labels can be referenced from other regions.
6942 Make sure to update it when seeing a GIMPLE_LABEL though,
6943 that is the owner of the label. */
6944 walk_gimple_op (stmt
, move_stmt_op
, wi
);
6945 *handled_ops_p
= true;
6946 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
6947 if (FORCED_LABEL (label
) || DECL_NONLOCAL (label
))
6948 DECL_CONTEXT (label
) = p
->to_context
;
6953 if (is_gimple_omp (stmt
))
6955 /* Do not remap variables inside OMP directives. Variables
6956 referenced in clauses and directive header belong to the
6957 parent function and should not be moved into the child
6959 bool save_remap_decls_p
= p
->remap_decls_p
;
6960 p
->remap_decls_p
= false;
6961 *handled_ops_p
= true;
/* Recurse into the OMP body with decl-remapping disabled.  */
6963 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6966 p
->remap_decls_p
= save_remap_decls_p
;
6974 /* Move basic block BB from function CFUN to function DEST_FN. The
6975 block is moved out of the original linked list and placed after
6976 block AFTER in the new list. Also, the block is removed from the
6977 original array of blocks and placed in DEST_FN's array of blocks.
6978 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6979 updated to reflect the moved edges.
6981 The local variables are remapped to new instances, VARS_MAP is used
6982 to record the mapping. */
/* NOTE(review): many lines (declarations of e/ei/i, loop headers,
   braces, pop_cfun) are elided in this chunk; code kept byte-identical.  */
6985 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6986 basic_block after
, bool update_edge_count_p
,
6987 struct move_stmt_d
*d
)
6989 struct control_flow_graph
*cfg
;
6992 gimple_stmt_iterator si
;
6993 unsigned old_len
, new_len
;
6995 /* Remove BB from dominance structures. */
6996 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6998 /* Move BB from its current loop to the copy in the new function. */
/* The caller stashed the destination loop in loop_father->aux.  */
7001 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
7003 bb
->loop_father
= new_loop
;
7006 /* Link BB to the new linked list. */
7007 move_block_after (bb
, after
);
7009 /* Update the edge count in the corresponding flowgraphs. */
7010 if (update_edge_count_p
)
7011 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7013 cfun
->cfg
->x_n_edges
--;
7014 dest_cfun
->cfg
->x_n_edges
++;
7017 /* Remove BB from the original basic block array. */
7018 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
7019 cfun
->cfg
->x_n_basic_blocks
--;
7021 /* Grow DEST_CFUN's basic block array if needed. */
7022 cfg
= dest_cfun
->cfg
;
7023 cfg
->x_n_basic_blocks
++;
7024 if (bb
->index
>= cfg
->x_last_basic_block
)
7025 cfg
->x_last_basic_block
= bb
->index
+ 1;
7027 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
7028 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
/* Grow by last_basic_block * 1.25, like the block-array growth
   elsewhere in the CFG code.  */
7030 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
7031 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
/* BB keeps its index in the destination function's array.  */
7034 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
7036 /* Remap the variables in phi nodes. */
7037 for (gphi_iterator psi
= gsi_start_phis (bb
);
7040 gphi
*phi
= psi
.phi ();
7042 tree op
= PHI_RESULT (phi
);
7046 if (virtual_operand_p (op
))
7048 /* Remove the phi nodes for virtual operands (alias analysis will be
7049 run for the new function, anyway). */
7050 remove_phi_node (&psi
, true);
/* Rewrite the PHI result and every SSA-name argument into the
   destination function's namespace.  */
7054 SET_PHI_RESULT (phi
,
7055 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
7056 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
7058 op
= USE_FROM_PTR (use
);
7059 if (TREE_CODE (op
) == SSA_NAME
)
7060 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
/* Re-home PHI argument locations whose block was ORIG_BLOCK.  */
7063 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
7065 location_t locus
= gimple_phi_arg_location (phi
, i
);
7066 tree block
= LOCATION_BLOCK (locus
);
7068 if (locus
== UNKNOWN_LOCATION
)
7070 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
7072 locus
= set_block (locus
, d
->new_block
);
7073 gimple_phi_arg_set_location (phi
, i
, locus
);
/* Walk every statement, remapping operands, labels and EH info.  */
7080 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
7082 gimple
*stmt
= gsi_stmt (si
);
7083 struct walk_stmt_info wi
;
7085 memset (&wi
, 0, sizeof (wi
));
7087 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
7089 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
7091 tree label
= gimple_label_label (label_stmt
);
7092 int uid
= LABEL_DECL_UID (label
);
7094 gcc_assert (uid
> -1);
/* Move the label's uid slot from the old map to the new one,
   growing the destination map if needed.  */
7096 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
7097 if (old_len
<= (unsigned) uid
)
7099 new_len
= 3 * uid
/ 2 + 1;
7100 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
7103 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
7104 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
7106 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
7108 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
7109 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
/* Transfer EH landing-pad info and value-profile histograms to the
   destination function, dropping them from the source.  */
7112 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
7113 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
7115 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
7116 gimple_remove_stmt_histograms (cfun
, stmt
);
7118 /* We cannot leave any operands allocated from the operand caches of
7119 the current function. */
7120 free_stmt_operands (cfun
, stmt
);
7121 push_cfun (dest_cfun
);
/* Re-home goto_locus blocks on BB's outgoing edges.  */
7126 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7127 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
7129 tree block
= LOCATION_BLOCK (e
->goto_locus
);
7130 if (d
->orig_block
== NULL_TREE
7131 || block
== d
->orig_block
)
7132 e
->goto_locus
= set_block (e
->goto_locus
, d
->new_block
);
7136 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7137 the outermost EH region. Use REGION as the incoming base EH region. */
/* NOTE(review): braces, an early `continue' for statements without a
   landing pad, and the final `return region' are elided in this chunk.  */
7140 find_outermost_region_in_block (struct function
*src_cfun
,
7141 basic_block bb
, eh_region region
)
7143 gimple_stmt_iterator si
;
7145 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
7147 gimple
*stmt
= gsi_stmt (si
);
7148 eh_region stmt_region
;
/* Map the statement's landing-pad number back to its EH region.  */
7151 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
7152 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
/* Widen REGION to the outermost region covering both.  */
7156 region
= stmt_region
;
7157 else if (stmt_region
!= region
)
7159 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
7160 gcc_assert (region
!= NULL
);
/* Callback for duplicate_eh_regions: create a fresh artificial label
   for DECL and record the old->new pair in the tree_map hash table
   passed via DATA.  NOTE(review): the leading comment, return type and
   the final `return m->to' are elided in this chunk.  */
7169 new_label_mapper (tree decl
, void *data
)
7171 htab_t hash
= (htab_t
) data
;
7175 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
/* Build the map entry: key on the old label's DECL_UID.  */
7177 m
= XNEW (struct tree_map
);
7178 m
->hash
= DECL_UID (decl
);
7179 m
->base
.from
= decl
;
7180 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
/* The new label reuses the old label's uid; keep last_label_uid
   consistent with that.  */
7181 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
7182 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
7183 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
/* Each label must be mapped at most once.  */
7185 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
7186 gcc_assert (*slot
== NULL
);
7193 /* Tree walker to replace the decls used inside value expressions by
/* walk_tree callback: DATA is a struct replace_decls_d carrying the
   vars_map and destination context.  NOTE(review): the switch's case
   labels and the return are elided in this chunk.  */
7197 replace_block_vars_by_duplicates_1 (tree
*tp
, int *walk_subtrees
, void *data
)
7199 struct replace_decls_d
*rd
= (struct replace_decls_d
*)data
;
7201 switch (TREE_CODE (*tp
))
7206 replace_by_duplicate_decl (tp
, rd
->vars_map
, rd
->to_context
);
/* Do not walk into types or other decls.  */
7212 if (IS_TYPE_OR_DECL_P (*tp
))
7213 *walk_subtrees
= false;
7218 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
/* NOTE(review): the TO_CONTEXT parameter line, `tree t' binding and
   some braces are elided in this chunk.  */
7222 replace_block_vars_by_duplicates (tree block
, hash_map
<tree
, tree
> *vars_map
,
/* Walk BLOCK's variable chain, replacing each VAR_DECL/CONST_DECL by
   its duplicate in TO_CONTEXT.  */
7227 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
7230 if (!VAR_P (t
) && TREE_CODE (t
) != CONST_DECL
)
7232 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
/* If the variable has a DECL_VALUE_EXPR, remap the decls inside it
   as well and carry the value expr over to the duplicate.  */
7235 if (VAR_P (*tp
) && DECL_HAS_VALUE_EXPR_P (*tp
))
7237 tree x
= DECL_VALUE_EXPR (*tp
);
7238 struct replace_decls_d rd
= { vars_map
, to_context
};
7240 walk_tree (&x
, replace_block_vars_by_duplicates_1
, &rd
, NULL
);
7241 SET_DECL_VALUE_EXPR (t
, x
);
7242 DECL_HAS_VALUE_EXPR_P (t
) = 1;
/* Splice the duplicate into the chain in place of the original.  */
7244 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
/* Recurse into nested lexical blocks.  */
7249 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
7250 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
7253 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
/* Move LOOP's bookkeeping from FN1's loop array to FN2's, assigning a
   fresh loop number, then recurse over its children.  */
7257 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
7260 /* Discard it from the old loop array. */
7261 (*get_loops (fn1
))[loop
->num
] = NULL
;
7263 /* Place it in the new loop array, assigning it a new number. */
7264 loop
->num
= number_of_loops (fn2
);
7265 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
7267 /* Recurse to children. */
7268 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
7269 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
7272 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7273 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
/* NOTE(review): declarations of i/bb/e/ei, several conditionals and the
   final BITMAP_FREE are elided in this chunk.  */
7276 verify_sese (basic_block entry
, basic_block exit
, vec
<basic_block
> *bbs_p
)
7281 bitmap bbs
= BITMAP_ALLOC (NULL
);
7284 gcc_assert (entry
!= NULL
);
7285 gcc_assert (entry
!= exit
);
7286 gcc_assert (bbs_p
!= NULL
);
7288 gcc_assert (bbs_p
->length () > 0);
/* Record every region block's index for O(1) membership tests.  */
7290 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
7291 bitmap_set_bit (bbs
, bb
->index
);
7293 gcc_assert (bitmap_bit_p (bbs
, entry
->index
));
7294 gcc_assert (exit
== NULL
|| bitmap_bit_p (bbs
, exit
->index
));
7296 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
/* The entry block's single predecessor must lie outside the region;
   every other block's predecessors must lie inside it.  */
7300 gcc_assert (single_pred_p (entry
));
7301 gcc_assert (!bitmap_bit_p (bbs
, single_pred (entry
)->index
));
7304 for (ei
= ei_start (bb
->preds
); !ei_end_p (ei
); ei_next (&ei
))
7307 gcc_assert (bitmap_bit_p (bbs
, e
->src
->index
));
/* Symmetrically, the exit block's single successor is outside while
   all other successors stay inside.  */
7312 gcc_assert (single_succ_p (exit
));
7313 gcc_assert (!bitmap_bit_p (bbs
, single_succ (exit
)->index
));
7316 for (ei
= ei_start (bb
->succs
); !ei_end_p (ei
); ei_next (&ei
))
7319 gcc_assert (bitmap_bit_p (bbs
, e
->dest
->index
));
7326 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
/* hash_map traversal callback used by move_sese_region_to_fn to collect
   the SSA-name versions that must be released.  */
7329 gather_ssa_name_hash_map_from (tree
const &from
, tree
const &, void *data
)
7331 bitmap release_names
= (bitmap
)data
;
/* Skip map keys that are plain decls rather than SSA names.  */
7333 if (TREE_CODE (from
) != SSA_NAME
)
7336 bitmap_set_bit (release_names
, SSA_NAME_VERSION (from
));
7340 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7341 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7342 single basic block in the original CFG and the new basic block is
7343 returned. DEST_CFUN must not have a CFG yet.
7345 Note that the region need not be a pure SESE region. Blocks inside
7346 the region may contain calls to abort/exit. The only restriction
7347 is that ENTRY_BB should be the only entry point and it must
7350 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7351 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7352 to the new function.
7354 All local variables referenced in the region are assumed to be in
7355 the corresponding BLOCK_VARS and unexpanded variable lists
7356 associated with DEST_CFUN.
7358 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7359 reimplement move_sese_region_to_fn by duplicating the region rather than
/* NOTE(review): this is the longest function in the chunk and many of
   its lines (edge removal, pop_cfun calls, several conditionals, the
   final return) are elided; code kept byte-identical, comments only.  */
7363 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
7364 basic_block exit_bb
, tree orig_block
)
7366 vec
<basic_block
> bbs
, dom_bbs
;
7367 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
7368 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
7369 struct function
*saved_cfun
= cfun
;
7370 int *entry_flag
, *exit_flag
;
7371 profile_probability
*entry_prob
, *exit_prob
;
7372 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
7375 htab_t new_label_map
;
7376 hash_map
<void *, void *> *eh_map
;
7377 struct loop
*loop
= entry_bb
->loop_father
;
7378 struct loop
*loop0
= get_loop (saved_cfun
, 0);
7379 struct move_stmt_d d
;
7381 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7383 gcc_assert (entry_bb
!= exit_bb
7385 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
7387 /* Collect all the blocks in the region. Manually add ENTRY_BB
7388 because it won't be added by dfs_enumerate_from. */
7390 bbs
.safe_push (entry_bb
);
7391 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
7394 verify_sese (entry_bb
, exit_bb
, &bbs
);
7396 /* The blocks that used to be dominated by something in BBS will now be
7397 dominated by the new block. */
7398 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
7402 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7403 the predecessor edges to ENTRY_BB and the successor edges to
7404 EXIT_BB so that we can re-attach them to the new basic block that
7405 will replace the region. */
7406 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
7407 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
7408 entry_flag
= XNEWVEC (int, num_entry_edges
);
7409 entry_prob
= XNEWVEC (profile_probability
, num_entry_edges
);
/* Save source, flags and probability of each incoming edge before it
   is removed.  */
7411 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
7413 entry_prob
[i
] = e
->probability
;
7414 entry_flag
[i
] = e
->flags
;
7415 entry_pred
[i
++] = e
->src
;
/* Likewise for the edges leaving EXIT_BB.  */
7421 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
7422 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
7423 exit_flag
= XNEWVEC (int, num_exit_edges
);
7424 exit_prob
= XNEWVEC (profile_probability
, num_exit_edges
);
7426 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
7428 exit_prob
[i
] = e
->probability
;
7429 exit_flag
[i
] = e
->flags
;
7430 exit_succ
[i
++] = e
->dest
;
7442 /* Switch context to the child function to initialize DEST_FN's CFG. */
7443 gcc_assert (dest_cfun
->cfg
== NULL
);
7444 push_cfun (dest_cfun
);
7446 init_empty_tree_cfg ();
7448 /* Initialize EH information for the new function. */
7450 new_label_map
= NULL
;
7453 eh_region region
= NULL
;
/* Find the outermost EH region used by any block in the region so it
   can be duplicated into the new function.  */
7455 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7456 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
7458 init_eh_for_function ();
7461 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
7462 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
7463 new_label_mapper
, new_label_map
);
7467 /* Initialize an empty loop tree. */
7468 struct loops
*loops
= ggc_cleared_alloc
<struct loops
> ();
7469 init_loops_structure (dest_cfun
, loops
, 1);
7470 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
7471 set_loops_for_fn (dest_cfun
, loops
);
/* Keep a snapshot of the old loop array for orig_loop_num fixups.  */
7473 vec
<loop_p
, va_gc
> *larray
= get_loops (saved_cfun
)->copy ();
7475 /* Move the outlined loop tree part. */
7476 num_nodes
= bbs
.length ();
7477 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7479 if (bb
->loop_father
->header
== bb
)
7481 struct loop
*this_loop
= bb
->loop_father
;
7482 struct loop
*outer
= loop_outer (this_loop
);
7484 /* If the SESE region contains some bbs ending with
7485 a noreturn call, those are considered to belong
7486 to the outermost loop in saved_cfun, rather than
7487 the entry_bb's loop_father. */
/* Re-parent the whole loop under the destination's root loop and
   renumber it there.  */
7491 num_nodes
-= this_loop
->num_nodes
;
7492 flow_loop_tree_node_remove (bb
->loop_father
);
7493 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
7494 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
7497 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
7500 /* Remove loop exits from the outlined region. */
7501 if (loops_for_fn (saved_cfun
)->exits
)
7502 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7504 struct loops
*l
= loops_for_fn (saved_cfun
);
7506 = l
->exits
->find_slot_with_hash (e
, htab_hash_pointer (e
),
7509 l
->exits
->clear_slot (slot
);
7514 /* Adjust the number of blocks in the tree root of the outlined part. */
7515 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
7517 /* Setup a mapping to be used by move_block_to_fn. */
7518 loop
->aux
= current_loops
->tree_root
;
7519 loop0
->aux
= current_loops
->tree_root
;
7521 /* Fix up orig_loop_num. If the block referenced in it has been moved
7522 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7524 FOR_EACH_LOOP_FN (dest_cfun
, dloop
, 0)
7525 if (dloop
->orig_loop_num
)
7527 if ((*larray
)[dloop
->orig_loop_num
] != NULL
7528 && get_loop (saved_cfun
, dloop
->orig_loop_num
) == NULL
)
7529 dloop
->orig_loop_num
= (*larray
)[dloop
->orig_loop_num
]->num
;
7531 dloop
->orig_loop_num
= 0;
7537 /* Move blocks from BBS into DEST_CFUN. */
7538 gcc_assert (bbs
.length () >= 2);
7539 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
7540 hash_map
<tree
, tree
> vars_map
;
/* Build the move_stmt_d context consumed by move_stmt_op/move_stmt_r.  */
7542 memset (&d
, 0, sizeof (d
));
7543 d
.orig_block
= orig_block
;
7544 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
7545 d
.from_context
= cfun
->decl
;
7546 d
.to_context
= dest_cfun
->decl
;
7547 d
.vars_map
= &vars_map
;
7548 d
.new_label_map
= new_label_map
;
7550 d
.remap_decls_p
= true;
/* Pre-seed default defs for the child's parameters so PARM_DECL uses
   can be looked up directly in move_stmt_op.  */
7552 if (gimple_in_ssa_p (cfun
))
7553 for (tree arg
= DECL_ARGUMENTS (d
.to_context
); arg
; arg
= DECL_CHAIN (arg
))
7555 tree narg
= make_ssa_name_fn (dest_cfun
, arg
, gimple_build_nop ());
7556 set_ssa_default_def (dest_cfun
, arg
, narg
);
7557 vars_map
.put (arg
, narg
);
7560 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7562 /* No need to update edge counts on the last block. It has
7563 already been updated earlier when we detached the region from
7564 the original CFG. */
7565 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
7571 /* Loop sizes are no longer correct, fix them up. */
7572 loop
->num_nodes
-= num_nodes
;
7573 for (struct loop
*outer
= loop_outer (loop
);
7574 outer
; outer
= loop_outer (outer
))
7575 outer
->num_nodes
-= num_nodes
;
7576 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
/* Propagate simduid/force_vectorize markers to the new function.  */
7578 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vectorize_loops
)
7581 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
7586 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
7588 dest_cfun
->has_simduid_loops
= true;
7590 if (aloop
->force_vectorize
)
7591 dest_cfun
->has_force_vectorize_loops
= true;
7595 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7599 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7601 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7602 = BLOCK_SUBBLOCKS (orig_block
);
7603 for (block
= BLOCK_SUBBLOCKS (orig_block
);
7604 block
; block
= BLOCK_CHAIN (block
))
7605 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
7606 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
/* Re-home the variables of the moved blocks into the new function.  */
7609 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
7610 &vars_map
, dest_cfun
->decl
);
7613 htab_delete (new_label_map
);
7617 if (gimple_in_ssa_p (cfun
))
7619 /* We need to release ssa-names in a defined order, so first find them,
7620 and then iterate in ascending version order. */
7621 bitmap release_names
= BITMAP_ALLOC (NULL
);
7622 vars_map
.traverse
<void *, gather_ssa_name_hash_map_from
> (release_names
);
7625 EXECUTE_IF_SET_IN_BITMAP (release_names
, 0, i
, bi
)
7626 release_ssa_name (ssa_name (i
));
7627 BITMAP_FREE (release_names
);
7630 /* Rewire the entry and exit blocks. The successor to the entry
7631 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7632 the child function. Similarly, the predecessor of DEST_FN's
7633 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7634 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7635 various CFG manipulation function get to the right CFG.
7637 FIXME, this is silly. The CFG ought to become a parameter to
7639 push_cfun (dest_cfun
);
7640 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= entry_bb
->count
;
7641 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
7644 make_single_succ_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
7645 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
= exit_bb
->count
;
7648 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
= profile_count::zero ();
7651 /* Back in the original function, the SESE region has disappeared,
7652 create a new basic block in its place. */
7653 bb
= create_empty_bb (entry_pred
[0]);
7655 add_bb_to_loop (bb
, loop
);
/* Re-attach the saved entry edges to the replacement block...  */
7656 for (i
= 0; i
< num_entry_edges
; i
++)
7658 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
7659 e
->probability
= entry_prob
[i
];
/* ... and the saved exit edges out of it.  */
7662 for (i
= 0; i
< num_exit_edges
; i
++)
7664 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
7665 e
->probability
= exit_prob
[i
];
/* The replacement block inherits the region's dominance role.  */
7668 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
7669 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
7670 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
7687 /* Dump default def DEF to file FILE using FLAGS and indentation
/* Emits `TYPE NAME = VAR;' preceded by SPC spaces and any SSA-name
   info (dump_ssaname_info_to_file).  */
7691 dump_default_def (FILE *file
, tree def
, int spc
, dump_flags_t flags
)
7693 for (int i
= 0; i
< spc
; ++i
)
7694 fprintf (file
, " ");
7695 dump_ssaname_info_to_file (file
, def
, spc
);
7697 print_generic_expr (file
, TREE_TYPE (def
), flags
);
7698 fprintf (file
, " ");
7699 print_generic_expr (file
, def
, flags
);
7700 fprintf (file
, " = ");
7701 print_generic_expr (file
, SSA_NAME_VAR (def
), flags
);
7702 fprintf (file
, ";\n");
7705 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
/* VALUE is an INTEGER_CST bitmask of SANITIZE_* flags; each matching
   entry of sanitizer_opts is printed, " | "-separated.  */
7708 print_no_sanitize_attr_value (FILE *file
, tree value
)
7710 unsigned int flags
= tree_to_uhwi (value
);
7712 for (int i
= 0; sanitizer_opts
[i
].name
!= NULL
; ++i
)
7714 if ((sanitizer_opts
[i
].flag
& flags
) == sanitizer_opts
[i
].flag
)
7717 fprintf (file
, " | ");
7718 fprintf (file
, "%s", sanitizer_opts
[i
].name
);
7724 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
/* NOTE(review): many lines (declarations of bb/ix/name/chain, several
   conditionals, early returns) are elided in this chunk; code kept
   byte-identical, comments only.  */
7728 dump_function_to_file (tree fndecl
, FILE *file
, dump_flags_t flags
)
7730 tree arg
, var
, old_current_fndecl
= current_function_decl
;
7731 struct function
*dsf
;
7732 bool ignore_topmost_bind
= false, any_var
= false;
7735 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
7736 && decl_is_tm_clone (fndecl
));
7737 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
/* Print the function's attribute list, if any.  */
7739 if (DECL_ATTRIBUTES (fndecl
) != NULL_TREE
)
7741 fprintf (file
, "__attribute__((");
7745 for (chain
= DECL_ATTRIBUTES (fndecl
); chain
;
7746 first
= false, chain
= TREE_CHAIN (chain
))
7749 fprintf (file
, ", ");
7751 tree name
= get_attribute_name (chain
);
7752 print_generic_expr (file
, name
, dump_flags
);
7753 if (TREE_VALUE (chain
) != NULL_TREE
)
7755 fprintf (file
, " (");
/* no_sanitize values are bitmasks; decode them symbolically.  */
7757 if (strstr (IDENTIFIER_POINTER (name
), "no_sanitize"))
7758 print_no_sanitize_attr_value (file
, TREE_VALUE (chain
));
7760 print_generic_expr (file
, TREE_VALUE (chain
), dump_flags
);
7761 fprintf (file
, ")");
7765 fprintf (file
, "))\n");
7768 current_function_decl
= fndecl
;
/* TDF_GIMPLE uses the __GIMPLE source syntax for the header.  */
7769 if (flags
& TDF_GIMPLE
)
7771 print_generic_expr (file
, TREE_TYPE (TREE_TYPE (fndecl
)),
7772 dump_flags
| TDF_SLIM
);
7773 fprintf (file
, " __GIMPLE ()\n%s (", function_name (fun
));
7776 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
/* Print the parameter list.  */
7778 arg
= DECL_ARGUMENTS (fndecl
);
7781 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
7782 fprintf (file
, " ");
7783 print_generic_expr (file
, arg
, dump_flags
);
7784 if (DECL_CHAIN (arg
))
7785 fprintf (file
, ", ");
7786 arg
= DECL_CHAIN (arg
);
7788 fprintf (file
, ")\n");
7790 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7791 if (dsf
&& (flags
& TDF_EH
))
7792 dump_eh_tree (file
, dsf
);
/* Raw dumps without a gimple body fall back to dump_node.  */
7794 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7796 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7797 current_function_decl
= old_current_fndecl
;
7801 /* When GIMPLE is lowered, the variables are no longer available in
7802 BIND_EXPRs, so display them separately. */
7803 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7806 ignore_topmost_bind
= true;
7808 fprintf (file
, "{\n");
/* With TDF_ALIAS, also dump the default defs of parameters, the
   by-reference result and the static chain.  */
7809 if (gimple_in_ssa_p (fun
)
7810 && (flags
& TDF_ALIAS
))
7812 for (arg
= DECL_ARGUMENTS (fndecl
); arg
!= NULL
;
7813 arg
= DECL_CHAIN (arg
))
7815 tree def
= ssa_default_def (fun
, arg
);
7817 dump_default_def (file
, def
, 2, flags
);
7820 tree res
= DECL_RESULT (fun
->decl
);
7821 if (res
!= NULL_TREE
7822 && DECL_BY_REFERENCE (res
))
7824 tree def
= ssa_default_def (fun
, res
);
7826 dump_default_def (file
, def
, 2, flags
);
7829 tree static_chain
= fun
->static_chain_decl
;
7830 if (static_chain
!= NULL_TREE
)
7832 tree def
= ssa_default_def (fun
, static_chain
);
7834 dump_default_def (file
, def
, 2, flags
);
/* Dump local variable declarations.  */
7838 if (!vec_safe_is_empty (fun
->local_decls
))
7839 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7841 print_generic_decl (file
, var
, flags
);
7842 fprintf (file
, "\n");
/* Dump SSA names that have no underlying variable.  */
7849 if (gimple_in_ssa_p (cfun
))
7850 FOR_EACH_SSA_NAME (ix
, name
, cfun
)
7852 if (!SSA_NAME_VAR (name
))
7854 fprintf (file
, " ");
7855 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7856 fprintf (file
, " ");
7857 print_generic_expr (file
, name
, flags
);
7858 fprintf (file
, ";\n");
7865 if (fun
&& fun
->decl
== fndecl
7867 && basic_block_info_for_fn (fun
))
7869 /* If the CFG has been built, emit a CFG-based dump. */
7870 if (!ignore_topmost_bind
)
7871 fprintf (file
, "{\n");
7873 if (any_var
&& n_basic_blocks_for_fn (fun
))
7874 fprintf (file
, "\n");
7876 FOR_EACH_BB_FN (bb
, fun
)
7877 dump_bb (file
, bb
, 2, flags
);
7879 fprintf (file
, "}\n");
7881 else if (fun
->curr_properties
& PROP_gimple_any
)
7883 /* The function is now in GIMPLE form but the CFG has not been
7884 built yet. Emit the single sequence of GIMPLE statements
7885 that make up its body. */
7886 gimple_seq body
= gimple_body (fndecl
);
7888 if (gimple_seq_first_stmt (body
)
7889 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7890 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7891 print_gimple_seq (file
, body
, 0, flags
);
7894 if (!ignore_topmost_bind
)
7895 fprintf (file
, "{\n");
7898 fprintf (file
, "\n");
7900 print_gimple_seq (file
, body
, 2, flags
);
7901 fprintf (file
, "}\n");
7908 /* Make a tree based dump. */
7909 chain
= DECL_SAVED_TREE (fndecl
);
7910 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7912 if (ignore_topmost_bind
)
7914 chain
= BIND_EXPR_BODY (chain
);
7922 if (!ignore_topmost_bind
)
7924 fprintf (file
, "{\n");
7925 /* No topmost bind, pretend it's ignored for later. */
7926 ignore_topmost_bind
= true;
7932 fprintf (file
, "\n");
7934 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7935 if (ignore_topmost_bind
)
7936 fprintf (file
, "}\n");
7939 if (flags
& TDF_ENUMERATE_LOCALS
)
7940 dump_enumerated_decls (file
, flags
);
7941 fprintf (file
, "\n\n");
/* Restore the caller's notion of the current function.  */
7943 current_function_decl
= old_current_fndecl
;
7946 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
/* Debugger convenience wrapper around dump_function_to_file.  */
7949 debug_function (tree fn
, dump_flags_t flags
)
7951 dump_function_to_file (fn
, stderr
, flags
);
7955 /* Print on FILE the indexes for the predecessors of basic_block BB. */
/* Output format: "bb_N " for each incoming edge's source block.  */
7958 print_pred_bbs (FILE *file
, basic_block bb
)
7963 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7964 fprintf (file
, "bb_%d ", e
->src
->index
);
7968 /* Print on FILE the indexes for the successors of basic_block BB. */
/* Output format: "bb_N " for each outgoing edge's destination block.  */
7971 print_succ_bbs (FILE *file
, basic_block bb
)
7976 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7977 fprintf (file
, "bb_%d ", e
->dest
->index
);
7980 /* Print to FILE the basic block BB following the VERBOSITY level. */
/* NOTE(review): the verbosity threshold checks guarding the header and
   body sections are elided in this chunk.  */
7983 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
/* Build an INDENT-wide space prefix on the stack.  */
7985 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7986 memset ((void *) s_indent
, ' ', (size_t) indent
);
7987 s_indent
[indent
] = '\0';
7989 /* Print basic_block's header. */
7992 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7993 print_pred_bbs (file
, bb
);
7994 fprintf (file
, "}, succs = {");
7995 print_succ_bbs (file
, bb
);
7996 fprintf (file
, "})\n");
7999 /* Print basic_block's body. */
8002 fprintf (file
, "%s {\n", s_indent
);
8003 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
8004 fprintf (file
, "%s }\n", s_indent
);
8008 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
8010 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8011 VERBOSITY level this outputs the contents of the loop, or just its
8015 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
8023 s_indent
= (char *) alloca ((size_t) indent
+ 1);
8024 memset ((void *) s_indent
, ' ', (size_t) indent
);
8025 s_indent
[indent
] = '\0';
8027 /* Print loop's header. */
8028 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
8030 fprintf (file
, "header = %d", loop
->header
->index
);
8033 fprintf (file
, "deleted)\n");
8037 fprintf (file
, ", latch = %d", loop
->latch
->index
);
8039 fprintf (file
, ", multiple latches");
8040 fprintf (file
, ", niter = ");
8041 print_generic_expr (file
, loop
->nb_iterations
);
8043 if (loop
->any_upper_bound
)
8045 fprintf (file
, ", upper_bound = ");
8046 print_decu (loop
->nb_iterations_upper_bound
, file
);
8048 if (loop
->any_likely_upper_bound
)
8050 fprintf (file
, ", likely_upper_bound = ");
8051 print_decu (loop
->nb_iterations_likely_upper_bound
, file
);
8054 if (loop
->any_estimate
)
8056 fprintf (file
, ", estimate = ");
8057 print_decu (loop
->nb_iterations_estimate
, file
);
8060 fprintf (file
, ", unroll = %d", loop
->unroll
);
8061 fprintf (file
, ")\n");
8063 /* Print loop's body. */
8066 fprintf (file
, "%s{\n", s_indent
);
8067 FOR_EACH_BB_FN (bb
, cfun
)
8068 if (bb
->loop_father
== loop
)
8069 print_loops_bb (file
, bb
, indent
, verbosity
);
8071 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
8072 fprintf (file
, "%s}\n", s_indent
);
8076 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8077 spaces. Following VERBOSITY level this outputs the contents of the
8078 loop, or just its structure. */
8081 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
8087 print_loop (file
, loop
, indent
, verbosity
);
8088 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
8091 /* Follow a CFG edge from the entry point of the program, and on entry
8092 of a loop, pretty print the loop structure on FILE. */
8095 print_loops (FILE *file
, int verbosity
)
8099 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
8100 fprintf (file
, "\nLoops in function: %s\n", current_function_name ());
8101 if (bb
&& bb
->loop_father
)
8102 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
8108 debug (struct loop
&ref
)
8110 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
8114 debug (struct loop
*ptr
)
8119 fprintf (stderr
, "<nil>\n");
8122 /* Dump a loop verbosely. */
8125 debug_verbose (struct loop
&ref
)
8127 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
8131 debug_verbose (struct loop
*ptr
)
8136 fprintf (stderr
, "<nil>\n");
8140 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8143 debug_loops (int verbosity
)
8145 print_loops (stderr
, verbosity
);
8148 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8151 debug_loop (struct loop
*loop
, int verbosity
)
8153 print_loop (stderr
, loop
, 0, verbosity
);
8156 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8160 debug_loop_num (unsigned num
, int verbosity
)
8162 debug_loop (get_loop (cfun
, num
), verbosity
);
8165 /* Return true if BB ends with a call, possibly followed by some
8166 instructions that must stay with the call. Return false,
8170 gimple_block_ends_with_call_p (basic_block bb
)
8172 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
8173 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
8177 /* Return true if BB ends with a conditional branch. Return false,
8181 gimple_block_ends_with_condjump_p (const_basic_block bb
)
8183 gimple
*stmt
= last_stmt (CONST_CAST_BB (bb
));
8184 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
8188 /* Return true if statement T may terminate execution of BB in ways not
8189 explicitly represtented in the CFG. */
8192 stmt_can_terminate_bb_p (gimple
*t
)
8194 tree fndecl
= NULL_TREE
;
8197 /* Eh exception not handled internally terminates execution of the whole
8199 if (stmt_can_throw_external (t
))
8202 /* NORETURN and LONGJMP calls already have an edge to exit.
8203 CONST and PURE calls do not need one.
8204 We don't currently check for CONST and PURE here, although
8205 it would be a good idea, because those attributes are
8206 figured out from the RTL in mark_constant_function, and
8207 the counter incrementation code from -fprofile-arcs
8208 leads to different results from -fbranch-probabilities. */
8209 if (is_gimple_call (t
))
8211 fndecl
= gimple_call_fndecl (t
);
8212 call_flags
= gimple_call_flags (t
);
8215 if (is_gimple_call (t
)
8217 && DECL_BUILT_IN (fndecl
)
8218 && (call_flags
& ECF_NOTHROW
)
8219 && !(call_flags
& ECF_RETURNS_TWICE
)
8220 /* fork() doesn't really return twice, but the effect of
8221 wrapping it in __gcov_fork() which calls __gcov_flush()
8222 and clears the counters before forking has the same
8223 effect as returning twice. Force a fake edge. */
8224 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
8225 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
8228 if (is_gimple_call (t
))
8234 if (call_flags
& (ECF_PURE
| ECF_CONST
)
8235 && !(call_flags
& ECF_LOOPING_CONST_OR_PURE
))
8238 /* Function call may do longjmp, terminate program or do other things.
8239 Special case noreturn that have non-abnormal edges out as in this case
8240 the fact is sufficiently represented by lack of edges out of T. */
8241 if (!(call_flags
& ECF_NORETURN
))
8245 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8246 if ((e
->flags
& EDGE_FAKE
) == 0)
8250 if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (t
))
8251 if (gimple_asm_volatile_p (asm_stmt
) || gimple_asm_input_p (asm_stmt
))
8258 /* Add fake edges to the function exit for any non constant and non
8259 noreturn calls (or noreturn calls with EH/abnormal edges),
8260 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8261 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8264 The goal is to expose cases in which entering a basic block does
8265 not imply that all subsequent instructions must be executed. */
8268 gimple_flow_call_edges_add (sbitmap blocks
)
8271 int blocks_split
= 0;
8272 int last_bb
= last_basic_block_for_fn (cfun
);
8273 bool check_last_block
= false;
8275 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
8279 check_last_block
= true;
8281 check_last_block
= bitmap_bit_p (blocks
,
8282 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
8284 /* In the last basic block, before epilogue generation, there will be
8285 a fallthru edge to EXIT. Special care is required if the last insn
8286 of the last basic block is a call because make_edge folds duplicate
8287 edges, which would result in the fallthru edge also being marked
8288 fake, which would result in the fallthru edge being removed by
8289 remove_fake_edges, which would result in an invalid CFG.
8291 Moreover, we can't elide the outgoing fake edge, since the block
8292 profiler needs to take this into account in order to solve the minimal
8293 spanning tree in the case that the call doesn't return.
8295 Handle this by adding a dummy instruction in a new last basic block. */
8296 if (check_last_block
)
8298 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
8299 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
8302 if (!gsi_end_p (gsi
))
8305 if (t
&& stmt_can_terminate_bb_p (t
))
8309 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8312 gsi_insert_on_edge (e
, gimple_build_nop ());
8313 gsi_commit_edge_inserts ();
8318 /* Now add fake edges to the function exit for any non constant
8319 calls since there is no way that we can determine if they will
8321 for (i
= 0; i
< last_bb
; i
++)
8323 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8324 gimple_stmt_iterator gsi
;
8325 gimple
*stmt
, *last_stmt
;
8330 if (blocks
&& !bitmap_bit_p (blocks
, i
))
8333 gsi
= gsi_last_nondebug_bb (bb
);
8334 if (!gsi_end_p (gsi
))
8336 last_stmt
= gsi_stmt (gsi
);
8339 stmt
= gsi_stmt (gsi
);
8340 if (stmt_can_terminate_bb_p (stmt
))
8344 /* The handling above of the final block before the
8345 epilogue should be enough to verify that there is
8346 no edge to the exit block in CFG already.
8347 Calling make_edge in such case would cause us to
8348 mark that edge as fake and remove it later. */
8349 if (flag_checking
&& stmt
== last_stmt
)
8351 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8352 gcc_assert (e
== NULL
);
8355 /* Note that the following may create a new basic block
8356 and renumber the existing basic blocks. */
8357 if (stmt
!= last_stmt
)
8359 e
= split_block (bb
, stmt
);
8363 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
8364 e
->probability
= profile_probability::guessed_never ();
8368 while (!gsi_end_p (gsi
));
8373 checking_verify_flow_info ();
8375 return blocks_split
;
8378 /* Removes edge E and all the blocks dominated by it, and updates dominance
8379 information. The IL in E->src needs to be updated separately.
8380 If dominance info is not available, only the edge E is removed.*/
8383 remove_edge_and_dominated_blocks (edge e
)
8385 vec
<basic_block
> bbs_to_remove
= vNULL
;
8386 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
8389 bool none_removed
= false;
8391 basic_block bb
, dbb
;
8394 /* If we are removing a path inside a non-root loop that may change
8395 loop ownership of blocks or remove loops. Mark loops for fixup. */
8397 && loop_outer (e
->src
->loop_father
) != NULL
8398 && e
->src
->loop_father
== e
->dest
->loop_father
)
8399 loops_state_set (LOOPS_NEED_FIXUP
);
8401 if (!dom_info_available_p (CDI_DOMINATORS
))
8407 /* No updating is needed for edges to exit. */
8408 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8410 if (cfgcleanup_altered_bbs
)
8411 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
8416 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8417 that is not dominated by E->dest, then this set is empty. Otherwise,
8418 all the basic blocks dominated by E->dest are removed.
8420 Also, to DF_IDOM we store the immediate dominators of the blocks in
8421 the dominance frontier of E (i.e., of the successors of the
8422 removed blocks, if there are any, and of E->dest otherwise). */
8423 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
8428 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
8430 none_removed
= true;
8435 auto_bitmap df
, df_idom
;
8437 bitmap_set_bit (df_idom
,
8438 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
8441 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
8442 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
8444 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
8446 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
8447 bitmap_set_bit (df
, f
->dest
->index
);
8450 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
8451 bitmap_clear_bit (df
, bb
->index
);
8453 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
8455 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8456 bitmap_set_bit (df_idom
,
8457 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
8461 if (cfgcleanup_altered_bbs
)
8463 /* Record the set of the altered basic blocks. */
8464 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
8465 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
8468 /* Remove E and the cancelled blocks. */
8473 /* Walk backwards so as to get a chance to substitute all
8474 released DEFs into debug stmts. See
8475 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8477 for (i
= bbs_to_remove
.length (); i
-- > 0; )
8478 delete_basic_block (bbs_to_remove
[i
]);
8481 /* Update the dominance information. The immediate dominator may change only
8482 for blocks whose immediate dominator belongs to DF_IDOM:
8484 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8485 removal. Let Z the arbitrary block such that idom(Z) = Y and
8486 Z dominates X after the removal. Before removal, there exists a path P
8487 from Y to X that avoids Z. Let F be the last edge on P that is
8488 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8489 dominates W, and because of P, Z does not dominate W), and W belongs to
8490 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8491 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
8493 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8494 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
8496 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
8497 bbs_to_fix_dom
.safe_push (dbb
);
8500 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
8502 bbs_to_remove
.release ();
8503 bbs_to_fix_dom
.release ();
8506 /* Purge dead EH edges from basic block BB. */
8509 gimple_purge_dead_eh_edges (basic_block bb
)
8511 bool changed
= false;
8514 gimple
*stmt
= last_stmt (bb
);
8516 if (stmt
&& stmt_can_throw_internal (stmt
))
8519 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8521 if (e
->flags
& EDGE_EH
)
8523 remove_edge_and_dominated_blocks (e
);
8533 /* Purge dead EH edges from basic block listed in BLOCKS. */
8536 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
8538 bool changed
= false;
8542 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8544 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8546 /* Earlier gimple_purge_dead_eh_edges could have removed
8547 this basic block already. */
8548 gcc_assert (bb
|| changed
);
8550 changed
|= gimple_purge_dead_eh_edges (bb
);
8556 /* Purge dead abnormal call edges from basic block BB. */
8559 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
8561 bool changed
= false;
8564 gimple
*stmt
= last_stmt (bb
);
8566 if (!cfun
->has_nonlocal_label
8567 && !cfun
->calls_setjmp
)
8570 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
8573 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8575 if (e
->flags
& EDGE_ABNORMAL
)
8577 if (e
->flags
& EDGE_FALLTHRU
)
8578 e
->flags
&= ~EDGE_ABNORMAL
;
8580 remove_edge_and_dominated_blocks (e
);
8590 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8593 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
8595 bool changed
= false;
8599 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8601 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8603 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8604 this basic block already. */
8605 gcc_assert (bb
|| changed
);
8607 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
8613 /* This function is called whenever a new edge is created or
8617 gimple_execute_on_growing_pred (edge e
)
8619 basic_block bb
= e
->dest
;
8621 if (!gimple_seq_empty_p (phi_nodes (bb
)))
8622 reserve_phi_args_for_new_edge (bb
);
8625 /* This function is called immediately before edge E is removed from
8626 the edge vector E->dest->preds. */
8629 gimple_execute_on_shrinking_pred (edge e
)
8631 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
8632 remove_phi_args (e
);
8635 /*---------------------------------------------------------------------------
8636 Helper functions for Loop versioning
8637 ---------------------------------------------------------------------------*/
8639 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8640 of 'first'. Both of them are dominated by 'new_head' basic block. When
8641 'new_head' was created by 'second's incoming edge it received phi arguments
8642 on the edge by split_edge(). Later, additional edge 'e' was created to
8643 connect 'new_head' and 'first'. Now this routine adds phi args on this
8644 additional edge 'e' that new_head to second edge received as part of edge
8648 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
8649 basic_block new_head
, edge e
)
8652 gphi_iterator psi1
, psi2
;
8654 edge e2
= find_edge (new_head
, second
);
8656 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8657 edge, we should always have an edge from NEW_HEAD to SECOND. */
8658 gcc_assert (e2
!= NULL
);
8660 /* Browse all 'second' basic block phi nodes and add phi args to
8661 edge 'e' for 'first' head. PHI args are always in correct order. */
8663 for (psi2
= gsi_start_phis (second
),
8664 psi1
= gsi_start_phis (first
);
8665 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
8666 gsi_next (&psi2
), gsi_next (&psi1
))
8670 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
8671 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
8676 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8677 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8678 the destination of the ELSE part. */
8681 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
8682 basic_block second_head ATTRIBUTE_UNUSED
,
8683 basic_block cond_bb
, void *cond_e
)
8685 gimple_stmt_iterator gsi
;
8686 gimple
*new_cond_expr
;
8687 tree cond_expr
= (tree
) cond_e
;
8690 /* Build new conditional expr */
8691 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
8692 NULL_TREE
, NULL_TREE
);
8694 /* Add new cond in cond_bb. */
8695 gsi
= gsi_last_bb (cond_bb
);
8696 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
8698 /* Adjust edges appropriately to connect new head with first head
8699 as well as second head. */
8700 e0
= single_succ_edge (cond_bb
);
8701 e0
->flags
&= ~EDGE_FALLTHRU
;
8702 e0
->flags
|= EDGE_FALSE_VALUE
;
8706 /* Do book-keeping of basic block BB for the profile consistency checker.
8707 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8708 then do post-pass accounting. Store the counting in RECORD. */
8710 gimple_account_profile_record (basic_block bb
, int after_pass
,
8711 struct profile_record
*record
)
8713 gimple_stmt_iterator i
;
8714 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
8716 record
->size
[after_pass
]
8717 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
8718 if (bb
->count
.initialized_p ())
8719 record
->time
[after_pass
]
8720 += estimate_num_insns (gsi_stmt (i
),
8721 &eni_time_weights
) * bb
->count
.to_gcov_type ();
8722 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
8723 record
->time
[after_pass
]
8724 += estimate_num_insns (gsi_stmt (i
),
8725 &eni_time_weights
) * bb
->count
.to_frequency (cfun
);
8729 struct cfg_hooks gimple_cfg_hooks
= {
8731 gimple_verify_flow_info
,
8732 gimple_dump_bb
, /* dump_bb */
8733 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
8734 create_bb
, /* create_basic_block */
8735 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
8736 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
8737 gimple_can_remove_branch_p
, /* can_remove_branch_p */
8738 remove_bb
, /* delete_basic_block */
8739 gimple_split_block
, /* split_block */
8740 gimple_move_block_after
, /* move_block_after */
8741 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
8742 gimple_merge_blocks
, /* merge_blocks */
8743 gimple_predict_edge
, /* predict_edge */
8744 gimple_predicted_by_p
, /* predicted_by_p */
8745 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
8746 gimple_duplicate_bb
, /* duplicate_block */
8747 gimple_split_edge
, /* split_edge */
8748 gimple_make_forwarder_block
, /* make_forward_block */
8749 NULL
, /* tidy_fallthru_edge */
8750 NULL
, /* force_nonfallthru */
8751 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
8752 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
8753 gimple_flow_call_edges_add
, /* flow_call_edges_add */
8754 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
8755 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
8756 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
8757 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
8758 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
8759 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
8760 flush_pending_stmts
, /* flush_pending_stmts */
8761 gimple_empty_block_p
, /* block_empty_p */
8762 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
8763 gimple_account_profile_record
,
8767 /* Split all critical edges. */
8770 split_critical_edges (void)
8776 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8777 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8778 mappings around the calls to split_edge. */
8779 start_recording_case_labels ();
8780 FOR_ALL_BB_FN (bb
, cfun
)
8782 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8784 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
8786 /* PRE inserts statements to edges and expects that
8787 since split_critical_edges was done beforehand, committing edge
8788 insertions will not split more edges. In addition to critical
8789 edges we must split edges that have multiple successors and
8790 end by control flow statements, such as RESX.
8791 Go ahead and split them too. This matches the logic in
8792 gimple_find_edge_insert_loc. */
8793 else if ((!single_pred_p (e
->dest
)
8794 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
8795 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8796 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
8797 && !(e
->flags
& EDGE_ABNORMAL
))
8799 gimple_stmt_iterator gsi
;
8801 gsi
= gsi_last_bb (e
->src
);
8802 if (!gsi_end_p (gsi
)
8803 && stmt_ends_bb_p (gsi_stmt (gsi
))
8804 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
8805 && !gimple_call_builtin_p (gsi_stmt (gsi
),
8811 end_recording_case_labels ();
8817 const pass_data pass_data_split_crit_edges
=
8819 GIMPLE_PASS
, /* type */
8820 "crited", /* name */
8821 OPTGROUP_NONE
, /* optinfo_flags */
8822 TV_TREE_SPLIT_EDGES
, /* tv_id */
8823 PROP_cfg
, /* properties_required */
8824 PROP_no_crit_edges
, /* properties_provided */
8825 0, /* properties_destroyed */
8826 0, /* todo_flags_start */
8827 0, /* todo_flags_finish */
8830 class pass_split_crit_edges
: public gimple_opt_pass
8833 pass_split_crit_edges (gcc::context
*ctxt
)
8834 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
8837 /* opt_pass methods: */
8838 virtual unsigned int execute (function
*) { return split_critical_edges (); }
8840 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8841 }; // class pass_split_crit_edges
8846 make_pass_split_crit_edges (gcc::context
*ctxt
)
8848 return new pass_split_crit_edges (ctxt
);
8852 /* Insert COND expression which is GIMPLE_COND after STMT
8853 in basic block BB with appropriate basic block split
8854 and creation of a new conditionally executed basic block.
8855 Update profile so the new bb is visited with probability PROB.
8856 Return created basic block. */
8858 insert_cond_bb (basic_block bb
, gimple
*stmt
, gimple
*cond
,
8859 profile_probability prob
)
8861 edge fall
= split_block (bb
, stmt
);
8862 gimple_stmt_iterator iter
= gsi_last_bb (bb
);
8865 /* Insert cond statement. */
8866 gcc_assert (gimple_code (cond
) == GIMPLE_COND
);
8867 if (gsi_end_p (iter
))
8868 gsi_insert_before (&iter
, cond
, GSI_CONTINUE_LINKING
);
8870 gsi_insert_after (&iter
, cond
, GSI_CONTINUE_LINKING
);
8872 /* Create conditionally executed block. */
8873 new_bb
= create_empty_bb (bb
);
8874 edge e
= make_edge (bb
, new_bb
, EDGE_TRUE_VALUE
);
8875 e
->probability
= prob
;
8876 new_bb
->count
= e
->count ();
8877 make_single_succ_edge (new_bb
, fall
->dest
, EDGE_FALLTHRU
);
8879 /* Fix edge for split bb. */
8880 fall
->flags
= EDGE_FALSE_VALUE
;
8881 fall
->probability
-= e
->probability
;
8883 /* Update dominance info. */
8884 if (dom_info_available_p (CDI_DOMINATORS
))
8886 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, bb
);
8887 set_immediate_dominator (CDI_DOMINATORS
, fall
->dest
, bb
);
8890 /* Update loop info. */
8892 add_bb_to_loop (new_bb
, bb
->loop_father
);
8897 /* Build a ternary operation and gimplify it. Emit code before GSI.
8898 Return the gimple_val holding the result. */
8901 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8902 tree type
, tree a
, tree b
, tree c
)
8905 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8907 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8910 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8914 /* Build a binary operation and gimplify it. Emit code before GSI.
8915 Return the gimple_val holding the result. */
8918 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8919 tree type
, tree a
, tree b
)
8923 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8926 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8930 /* Build a unary operation and gimplify it. Emit code before GSI.
8931 Return the gimple_val holding the result. */
8934 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8939 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8942 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8948 /* Given a basic block B which ends with a conditional and has
8949 precisely two successors, determine which of the edges is taken if
8950 the conditional is true and which is taken if the conditional is
8951 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8954 extract_true_false_edges_from_block (basic_block b
,
8958 edge e
= EDGE_SUCC (b
, 0);
8960 if (e
->flags
& EDGE_TRUE_VALUE
)
8963 *false_edge
= EDGE_SUCC (b
, 1);
8968 *true_edge
= EDGE_SUCC (b
, 1);
8973 /* From a controlling predicate in the immediate dominator DOM of
8974 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8975 predicate evaluates to true and false and store them to
8976 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8977 they are non-NULL. Returns true if the edges can be determined,
8978 else return false. */
8981 extract_true_false_controlled_edges (basic_block dom
, basic_block phiblock
,
8982 edge
*true_controlled_edge
,
8983 edge
*false_controlled_edge
)
8985 basic_block bb
= phiblock
;
8986 edge true_edge
, false_edge
, tem
;
8987 edge e0
= NULL
, e1
= NULL
;
8989 /* We have to verify that one edge into the PHI node is dominated
8990 by the true edge of the predicate block and the other edge
8991 dominated by the false edge. This ensures that the PHI argument
8992 we are going to take is completely determined by the path we
8993 take from the predicate block.
8994 We can only use BB dominance checks below if the destination of
8995 the true/false edges are dominated by their edge, thus only
8996 have a single predecessor. */
8997 extract_true_false_edges_from_block (dom
, &true_edge
, &false_edge
);
8998 tem
= EDGE_PRED (bb
, 0);
8999 if (tem
== true_edge
9000 || (single_pred_p (true_edge
->dest
)
9001 && (tem
->src
== true_edge
->dest
9002 || dominated_by_p (CDI_DOMINATORS
,
9003 tem
->src
, true_edge
->dest
))))
9005 else if (tem
== false_edge
9006 || (single_pred_p (false_edge
->dest
)
9007 && (tem
->src
== false_edge
->dest
9008 || dominated_by_p (CDI_DOMINATORS
,
9009 tem
->src
, false_edge
->dest
))))
9013 tem
= EDGE_PRED (bb
, 1);
9014 if (tem
== true_edge
9015 || (single_pred_p (true_edge
->dest
)
9016 && (tem
->src
== true_edge
->dest
9017 || dominated_by_p (CDI_DOMINATORS
,
9018 tem
->src
, true_edge
->dest
))))
9020 else if (tem
== false_edge
9021 || (single_pred_p (false_edge
->dest
)
9022 && (tem
->src
== false_edge
->dest
9023 || dominated_by_p (CDI_DOMINATORS
,
9024 tem
->src
, false_edge
->dest
))))
9031 if (true_controlled_edge
)
9032 *true_controlled_edge
= e0
;
9033 if (false_controlled_edge
)
9034 *false_controlled_edge
= e1
;
9039 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9040 range [low, high]. Place associated stmts before *GSI. */
9043 generate_range_test (basic_block bb
, tree index
, tree low
, tree high
,
9044 tree
*lhs
, tree
*rhs
)
9046 tree type
= TREE_TYPE (index
);
9047 tree utype
= unsigned_type_for (type
);
9049 low
= fold_convert (type
, low
);
9050 high
= fold_convert (type
, high
);
9052 tree tmp
= make_ssa_name (type
);
9054 = gimple_build_assign (tmp
, MINUS_EXPR
, index
, low
);
9056 *lhs
= make_ssa_name (utype
);
9057 gassign
*a
= gimple_build_assign (*lhs
, NOP_EXPR
, tmp
);
9059 *rhs
= fold_build2 (MINUS_EXPR
, utype
, high
, low
);
9060 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
9061 gsi_insert_before (&gsi
, sub1
, GSI_SAME_STMT
);
9062 gsi_insert_before (&gsi
, a
, GSI_SAME_STMT
);
9065 /* Emit return warnings. */
9069 const pass_data pass_data_warn_function_return
=
9071 GIMPLE_PASS
, /* type */
9072 "*warn_function_return", /* name */
9073 OPTGROUP_NONE
, /* optinfo_flags */
9074 TV_NONE
, /* tv_id */
9075 PROP_cfg
, /* properties_required */
9076 0, /* properties_provided */
9077 0, /* properties_destroyed */
9078 0, /* todo_flags_start */
9079 0, /* todo_flags_finish */
9082 class pass_warn_function_return
: public gimple_opt_pass
9085 pass_warn_function_return (gcc::context
*ctxt
)
9086 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
9089 /* opt_pass methods: */
9090 virtual unsigned int execute (function
*);
9092 }; // class pass_warn_function_return
9095 pass_warn_function_return::execute (function
*fun
)
9097 source_location location
;
9102 if (!targetm
.warn_func_return (fun
->decl
))
9105 /* If we have a path to EXIT, then we do return. */
9106 if (TREE_THIS_VOLATILE (fun
->decl
)
9107 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
9109 location
= UNKNOWN_LOCATION
;
9110 for (ei
= ei_start (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
);
9111 (e
= ei_safe_edge (ei
)); )
9113 last
= last_stmt (e
->src
);
9114 if ((gimple_code (last
) == GIMPLE_RETURN
9115 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
9116 && location
== UNKNOWN_LOCATION
9117 && ((location
= LOCATION_LOCUS (gimple_location (last
)))
9118 != UNKNOWN_LOCATION
)
9121 /* When optimizing, replace return stmts in noreturn functions
9122 with __builtin_unreachable () call. */
9123 if (optimize
&& gimple_code (last
) == GIMPLE_RETURN
)
9125 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
9126 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
9127 gimple_set_location (new_stmt
, gimple_location (last
));
9128 gimple_stmt_iterator gsi
= gsi_for_stmt (last
);
9129 gsi_replace (&gsi
, new_stmt
, true);
9135 if (location
== UNKNOWN_LOCATION
)
9136 location
= cfun
->function_end_locus
;
9137 warning_at (location
, 0, "%<noreturn%> function does return");
9140 /* If we see "return;" in some basic block, then we do reach the end
9141 without returning a value. */
9142 else if (warn_return_type
> 0
9143 && !TREE_NO_WARNING (fun
->decl
)
9144 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
9146 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
9148 gimple
*last
= last_stmt (e
->src
);
9149 greturn
*return_stmt
= dyn_cast
<greturn
*> (last
);
9151 && gimple_return_retval (return_stmt
) == NULL
9152 && !gimple_no_warning_p (last
))
9154 location
= gimple_location (last
);
9155 if (LOCATION_LOCUS (location
) == UNKNOWN_LOCATION
)
9156 location
= fun
->function_end_locus
;
9157 warning_at (location
, OPT_Wreturn_type
,
9158 "control reaches end of non-void function");
9159 TREE_NO_WARNING (fun
->decl
) = 1;
9163 /* The C++ FE turns fallthrough from the end of non-void function
9164 into __builtin_unreachable () call with BUILTINS_LOCATION.
9165 Recognize those too. */
9167 if (!TREE_NO_WARNING (fun
->decl
))
9168 FOR_EACH_BB_FN (bb
, fun
)
9169 if (EDGE_COUNT (bb
->succs
) == 0)
9171 gimple
*last
= last_stmt (bb
);
9172 const enum built_in_function ubsan_missing_ret
9173 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN
;
9175 && ((LOCATION_LOCUS (gimple_location (last
))
9176 == BUILTINS_LOCATION
9177 && gimple_call_builtin_p (last
, BUILT_IN_UNREACHABLE
))
9178 || gimple_call_builtin_p (last
, ubsan_missing_ret
)))
9180 gimple_stmt_iterator gsi
= gsi_for_stmt (last
);
9181 gsi_prev_nondebug (&gsi
);
9182 gimple
*prev
= gsi_stmt (gsi
);
9184 location
= UNKNOWN_LOCATION
;
9186 location
= gimple_location (prev
);
9187 if (LOCATION_LOCUS (location
) == UNKNOWN_LOCATION
)
9188 location
= fun
->function_end_locus
;
9189 warning_at (location
, OPT_Wreturn_type
,
9190 "control reaches end of non-void function");
9191 TREE_NO_WARNING (fun
->decl
) = 1;
9202 make_pass_warn_function_return (gcc::context
*ctxt
)
9204 return new pass_warn_function_return (ctxt
);
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple *g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	/* Recurse into every statement container so nested calls are
	   inspected too.  */
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (
					 as_a <gcatch *> (g)));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  /* A call whose value is consumed (has an LHS) is fine, and
	     internal calls have no user-visible attribute to check.  */
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      /* Mention the callee by name when we know it (direct call);
		 fall back to a generic message for indirect calls.  */
	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
/* Pass metadata for the -Wunused-result warning pass; the leading '*'
   in the name marks it as a pass not selectable via -fdump options.  */

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass wrapper around do_warn_unused_result: gated on
   flag_warn_unused_result (-Wunused-result) and applied to the
   gimplified body of the current function.  */

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
    {
      do_warn_unused_result (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_warn_unused_result
9308 make_pass_warn_unused_result (gcc::context
*ctxt
)
9310 return new pass_warn_unused_result (ctxt
);
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as stand alone pass from pass manager, because
   in between inlining and this fixup the verify_flow_info would fail.  */

static unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;	/* TODO_* flags to hand back to the pass manager.  */
  cgraph_node *node = cgraph_node::get (current_function_decl);
  /* When the callgraph node's count disagrees with the function body's
     entry count (e.g. after IPA cloning), rescale all BB counts.  */
  profile_count num = node->count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  bool scale = num.initialized_p () && !(num == den);

  if (scale)
    {
      profile_count::adjust_for_ipa_scaling (&num, &den);
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
      EXIT_BLOCK_PTR_FOR_FN (cfun)->count
	= EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (scale)
	bb->count = bb->count.apply_scale (num, den);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  /* A call newly discovered to be const/pure cannot make
		     abnormal transfers; purge the now-dead edges.  */
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.
	     Keep access when store has side effect, i.e. in case when source
	     is volatile.  */
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  /* gsi_remove already advanced the iterator; skip the
		     trailing gsi_next.  */
		  continue;
		}
	    }

	  /* For calls we can simply remove LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple *stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || !gimple_call_noreturn_p (stmt))))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      stmt = gimple_build_call (fndecl, 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	      /* Before inlining, keep the callgraph in sync by recording
		 an edge for the new call.  */
	      if (!cfun->after_inlining)
		{
		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
		  node->create_edge (cgraph_node::get_create (fndecl),
				     call_stmt, bb->count);
		}
	    }
	}
    }
  if (scale)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
/* Pass metadata for the CFG fixup pass run after IPA transforms and
   inlining; requires an existing CFG (PROP_cfg).  */

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass wrapper around execute_fixup_cfg.  Clonable because the pass
   appears more than once in the pass pipeline.  */

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg
9480 make_pass_fixup_cfg (gcc::context
*ctxt
)
9482 return new pass_fixup_cfg (ctxt
);
9485 /* Garbage collection support for edge_def. */
9487 extern void gt_ggc_mx (tree
&);
9488 extern void gt_ggc_mx (gimple
*&);
9489 extern void gt_ggc_mx (rtx
&);
9490 extern void gt_ggc_mx (basic_block
&);
9493 gt_ggc_mx (rtx_insn
*& x
)
9496 gt_ggc_mx_rtx_def ((void *) x
);
/* GC marking routine for an edge: mark its source and destination
   blocks, its pending instruction sequence (gimple or RTL depending on
   the current IR), and the BLOCK tree attached to its goto_locus.  */

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  /* e->insns is a union; pick the member matching the current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
9512 /* PCH support for edge_def. */
9514 extern void gt_pch_nx (tree
&);
9515 extern void gt_pch_nx (gimple
*&);
9516 extern void gt_pch_nx (rtx
&);
9517 extern void gt_pch_nx (basic_block
&);
9520 gt_pch_nx (rtx_insn
*& x
)
9523 gt_pch_nx_rtx_def ((void *) x
);
/* PCH walking routine for an edge: note its source and destination
   blocks, its pending instruction sequence (gimple or RTL depending on
   the current IR), and the BLOCK tree attached to its goto_locus.  */

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  /* e->insns is a union; pick the member matching the current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
/* PCH pointer-relocation routine for an edge: apply OP (with COOKIE)
   to every GC-managed pointer reachable from E, mirroring the
   gt_pch_nx (edge_def *) walk above.  */

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  /* e->insns is a union; pick the member matching the current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}
9554 namespace selftest
{
/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  Returns the new FUNCTION_DECL; callers are
   expected to pop_cfun when done.  */

static tree
push_fndecl (const char *name)
{
  /* Build a signature of the form "int NAME (void)".  */
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
			    NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  /* A freshly initialized CFG has just ENTRY and EXIT, no edges.  */
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}
/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
     - build_gimple_cfg
     - make_blocks: calls create_basic_block (seq, bb);
     - link_block + unlink_block.  */

/* Verify a simple cfg of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  /* ENTRY + EXIT + the three new blocks.  */
  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information
     Each BB in our simple chain should be dominated by the one before
     it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}
/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
    B     C
     \   /
      \ /
       D
       |
      EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  /* ENTRY + EXIT + the four new blocks.  */
  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  /* D is reached via both B and C, so its immediate dominator is A.  */
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where A B C D below all have edges
   pointing to each other node, also to themselves).  */

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Number of nodes in the fully-connected subgraph.  */
  const int n = 13;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  /* ENTRY + EXIT + the N new blocks.  */
  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  /* Every node in the subgraph points at every node, itself included.  */
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_POST_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_POST_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  /* NOTE(review): the test_diamond call was lost in extraction; restored
     here since test_diamond is defined above — verify against upstream.  */
  test_diamond ();
  test_fully_connected ();
}
9824 } // namespace selftest
9826 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9829 - switch statement (a block with many out-edges)
9830 - something that jumps to itself
9833 #endif /* CHECKING_P */