/* Control flow functions for trees.
   Copyright (C) 2001-2018 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;
/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;
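
/* For illustration (not part of the data structure itself): if cases
   1: and 2: of a switch both jump to label L, the single edge to L's
   block maps here to the chain "case 1 -> case 2", so redirecting
   that one edge lets us update both CASE_LABEL_EXPRs without
   rescanning the whole case vector.  See get_cases_for_edge below
   for how the chains are built.  */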
/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};
/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};
/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);
static edge find_taken_edge_switch_expr (const gswitch *, tree);
static tree find_case_label_for_value (const gswitch *, tree);
static void lower_phi_internal_fn ();
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */
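
/* For example (an illustrative GIMPLE sketch, names simplified): a loop
   tagged with "#pragma GCC ivdep" reaches this function as

       _1 = .ANNOTATE (cond_2, annot_expr_ivdep_kind);
       if (_1 != 0) goto <loop body>; else goto <exit>;

   and leaves it as a plain copy "_1 = cond_2;" once loop->safelen has
   been set.  */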
static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */
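/* (Illustrative: the GNU C statement "goto *p;" is a computed goto,
   since its destination is not a LABEL_DECL, while a plain "goto L;"
   is not.)  */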
bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */
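/* (E.g. a sequence of labels, debug stmts and clobbers ending in

       __builtin_unreachable ();

   answers true; any other statement in the sequence makes the answer
   false.  Illustrative summary of the checks below.)  */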
bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */
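/* (Illustrative summary of the conditions below: calls that may make
   an abnormal goto, noreturn calls, transaction-ending TM builtins,
   __builtin_return, and unique internal calls all get the flag set;
   an ordinary call does not.)  */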
static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
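
  /* For instance (an illustrative sketch), the sequence

	 # DEBUG BEGIN_STMT
	 L1:
	 x = 1;

     is rewritten to

	 L1:
	 # DEBUG BEGIN_STMT
	 x = 1;

     so the label still starts its block and the marker lands inside
     the block.  */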
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i);
	   gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 label.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */
basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB if
   COMPUTED_GOTO is false, otherwise factor the computed gotos.  */
static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	dispatcher += (computed_goto ? 1 : 0);
      inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */
static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }
  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
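
  /* Sketch of the factoring (illustrative GIMPLE): two computed gotos
     "goto *p_1;" and "goto *q_2;" each become

	 gotovar.0 = p_1;	(resp. gotovar.0 = q_2;)
	 goto <dispatcher>;
       <dispatcher>:
	 goto *gotovar.0;

     so abnormal edges run from the one dispatcher block to every
     FORCED_LABEL block, instead of from every goto to every label.  */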
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}
/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */
bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
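/* (Illustrative: successive queries for one source line return 1, 2,
   ...; e.g. the then- and else-arms of "if (p) x = 1; else x = 2;"
   may share the if's line, and giving them distinct discriminators
   lets a sample-based profiler attribute counts to each arm.)  */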
static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);

  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */
/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1492 main_block_label (tree label
)
1494 basic_block bb
= label_to_block (label
);
1495 tree main_label
= label_for_bb
[bb
->index
].label
;
1497 /* label_to_block possibly inserted undefined label into the chain. */
1500 label_for_bb
[bb
->index
].label
= label
;
1504 label_for_bb
[bb
->index
].used
= true;
1508 /* Clean up redundant labels within the exception tree. */
1511 cleanup_dead_labels_eh (void)
1518 if (cfun
->eh
== NULL
)
1521 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1522 if (lp
&& lp
->post_landing_pad
)
1524 lab
= main_block_label (lp
->post_landing_pad
);
1525 if (lab
!= lp
->post_landing_pad
)
1527 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1528 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1532 FOR_ALL_EH_REGION (r
)
1536 case ERT_MUST_NOT_THROW
:
1542 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1546 c
->label
= main_block_label (lab
);
1551 case ERT_ALLOWED_EXCEPTIONS
:
1552 lab
= r
->u
.allowed
.label
;
1554 r
->u
.allowed
.label
= main_block_label (lab
);
1560 /* Cleanup redundant labels. This is a three-step process:
1561 1) Find the leading label for each block.
1562 2) Redirect all references to labels to the leading labels.
1563 3) Cleanup all useless labels. */
1566 cleanup_dead_labels (void)
1569 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1571 /* Find a suitable label for each block. We use the first user-defined
1572 label if there is one, or otherwise just the first label we see. */
1573 FOR_EACH_BB_FN (bb
, cfun
)
1575 gimple_stmt_iterator i
;
1577 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1580 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1585 label
= gimple_label_label (label_stmt
);
1587 /* If we have not yet seen a label for the current block,
1588 remember this one and see if there are more labels. */
1589 if (!label_for_bb
[bb
->index
].label
)
1591 label_for_bb
[bb
->index
].label
= label
;
1595 /* If we did see a label for the current block already, but it
1596 is an artificially created label, replace it if the current
1597 label is a user defined label. */
1598 if (!DECL_ARTIFICIAL (label
)
1599 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1601 label_for_bb
[bb
->index
].label
= label
;
  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }
  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
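/* (Illustrative sketch: the label vector

       default: <L9>;  case 1: <L4>;  case 2: <L4>;  case 3: <L4>;
       case 7: <L5>;

   is rewritten in place to

       default: <L9>;  case 1 ... 3: <L4>;  case 7: <L5>;

   and then truncated to the new size; the default label at index 0
   is never merged.)  */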
bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb))
	  /* Don't optimize this if __builtin_unreachable () is the
	     implicitly added one by the C++ FE too early, before
	     -Wreturn-type can be diagnosed.  We'll optimize it later
	     during switchconv pass or any other cfg cleanup.  */
	  && (gimple_in_ssa_p (cfun)
	      || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
		  != BUILTINS_LOCATION)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  i = next_index;
	  continue;
	}

      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
2051 gimple_merge_blocks (basic_block a
, basic_block b
)
2053 gimple_stmt_iterator last
, gsi
;
2057 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
2059 /* Remove all single-valued PHI nodes from block B of the form
2060 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2061 gsi
= gsi_last_bb (a
);
2062 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
2064 gimple
*phi
= gsi_stmt (psi
);
2065 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
2067 bool may_replace_uses
= (virtual_operand_p (def
)
2068 || may_propagate_copy (def
, use
));
2070 /* In case we maintain loop closed ssa form, do not propagate arguments
2071 of loop exit phi nodes. */
2073 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
2074 && !virtual_operand_p (def
)
2075 && TREE_CODE (use
) == SSA_NAME
2076 && a
->loop_father
!= b
->loop_father
)
2077 may_replace_uses
= false;
2079 if (!may_replace_uses
)
2081 gcc_assert (!virtual_operand_p (def
));
2083 /* Note that just emitting the copies is fine -- there is no problem
2084 with ordering of phi nodes. This is because A is the single
2085 predecessor of B, therefore results of the phi nodes cannot
2086 appear as arguments of the phi nodes. */
2087 copy
= gimple_build_assign (def
, use
);
2088 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
2089 remove_phi_node (&psi
, false);
2093 /* If we deal with a PHI for virtual operands, we can simply
2094 propagate these without fussing with folding or updating
2096 if (virtual_operand_p (def
))
2098 imm_use_iterator iter
;
2099 use_operand_p use_p
;
2102 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
2103 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
2104 SET_USE (use_p
, use
);
2106 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
2107 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
2110 replace_uses_by (def
, use
);
2112 remove_phi_node (&psi
, true);
  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B.
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
	    {
	      gimple *dbg = gimple_build_debug_bind (label,
						     integer_zero_node,
						     stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = a->count.merge (b->count);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
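  /* EDGE_COMPLEX is the union of the edge flags describing transfers
     that cannot easily be redirected or split (abnormal, EH and related
     edges); a successor reached only over such an edge is skipped
     here.  */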
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}
/* T is CALL_EXPR.  Set current_function_calls_* flags.  */

void
notice_special_calls (gcall *call)
{
  int flags = gimple_call_flags (call);
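  /* The ECF_* bits summarize properties of the callee: ECF_MAY_BE_ALLOCA
     marks alloca-like calls and ECF_RETURNS_TWICE marks setjmp-like
     calls.  The cfun flags set here are consumed, among others, by
     call_can_make_abnormal_goto below.  */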
  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}
/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}
/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple *stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions.  */
	      release_defs (stmt);
	      gsi_remove (&i, true);
	    }

	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
/* Given a basic block BB and a value VAL for use in the final statement
   of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
   the edge that will be taken out of the block.
   If VAL is NULL_TREE, then the current value of the final statement's
   predicate or index is used.
   If the value does not match a unique edge, NULL is returned.  */
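/* For example, for a block ending in "if (x_1 > 0)" and VAL of
   integer_one_node, the edge flagged EDGE_TRUE_VALUE is returned.  */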
edge
find_taken_edge (basic_block bb, tree val)
{
  gimple *stmt;

  stmt = last_stmt (bb);

  /* Handle ENTRY and EXIT.  */
  if (!stmt)
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
	 not a label then we cannot construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
      if (val
	  && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
    }

  /* Otherwise we only know the taken successor edge if it's unique.  */
  return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
}
/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
   statement, determine which of the outgoing edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_computed_goto (basic_block bb, tree val)
{
  basic_block dest;
  edge e = NULL;

  dest = label_to_block (val);
  if (dest)
    {
      e = find_edge (bb, dest);
      gcc_assert (e != NULL);
    }

  return e;
}
/* Given COND_STMT and a constant value VAL for use as the predicate,
   determine which of the two edges will be taken out of
   the statement's block.  Return NULL if either edge may be taken.
   If VAL is NULL_TREE, then the current value of COND_STMT's predicate
   is used.  */

static edge
find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
{
  edge true_edge, false_edge;

  if (val == NULL_TREE)
    {
      /* Use the current value of the predicate.  */
      if (gimple_cond_true_p (cond_stmt))
	val = integer_one_node;
      else if (gimple_cond_false_p (cond_stmt))
	val = integer_zero_node;
      else
	return NULL;
    }
  else if (TREE_CODE (val) != INTEGER_CST)
    return NULL;

  extract_true_false_edges_from_block (gimple_bb (cond_stmt),
				       &true_edge, &false_edge);

  return (integer_zerop (val) ? false_edge : true_edge);
}
/* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
   which edge will be taken out of the statement's block.  Return NULL if any
   edge may be taken.
   If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
   is used.  */

static edge
find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
{
  basic_block dest_bb;
  edge e;
  tree taken_case;

  if (gimple_switch_num_labels (switch_stmt) == 1)
    taken_case = gimple_switch_default_label (switch_stmt);
  else
    {
      if (val == NULL_TREE)
	val = gimple_switch_index (switch_stmt);
      if (TREE_CODE (val) != INTEGER_CST)
	return NULL;
      else
	taken_case = find_case_label_for_value (switch_stmt, val);
    }
  dest_bb = label_to_block (CASE_LABEL (taken_case));

  e = find_edge (gimple_bb (switch_stmt), dest_bb);
  gcc_assert (e);
  return e;
}
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (const gswitch *switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);
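  /* The default label is at index 0 and the user labels in [1, n-1]
     are sorted by CASE_LOW.  The binary search below keeps LOW at a
     label whose CASE_LOW is known to be <= VAL (or at the default) and
     HIGH at a label whose CASE_LOW is known to be greater, halving the
     distance between them each iteration.  */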
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
/* Dump a basic block on stderr.  */

DEBUG_FUNCTION void
gimple_debug_bb (basic_block bb)
{
  dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
}


/* Dump basic block with index N on stderr.  */

DEBUG_FUNCTION basic_block
gimple_debug_bb_n (int n)
{
  gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
  return BASIC_BLOCK_FOR_FN (cfun, n);
}
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

DEBUG_FUNCTION void
gimple_debug_cfg (dump_flags_t flags)
{
  gimple_dump_cfg (stderr, flags);
}


/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   dumpfile.h).  */

void
gimple_dump_cfg (FILE *file, dump_flags_t flags)
{
  if (flags & TDF_DETAILS)
    {
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
	       last_basic_block_for_fn (cfun));

      brief_dump_cfg (file, flags);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
/* Dump CFG statistics on FILE.  */

void
dump_cfg_stats (FILE *file)
{
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SCALE (size), LABEL (size));

  num_edges = 0;
  FOR_EACH_BB_FN (bb, cfun)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
/*---------------------------------------------------------------------------
			     Miscellaneous helpers
---------------------------------------------------------------------------*/

/* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
   flow.  Transfers of control flow associated with EH are excluded.  */

static bool
call_can_make_abnormal_goto (gimple *t)
{
  /* If the function has no non-local labels, then a call cannot make an
     abnormal transfer of control.  */
  if (!cfun->has_nonlocal_label
      && !cfun->calls_setjmp)
    return false;

  /* Likewise if the call has no side effects.  */
  if (!gimple_has_side_effects (t))
    return false;

  /* Likewise if the called function is leaf.  */
  if (gimple_call_flags (t) & ECF_LEAF)
    return false;

  return true;
}
/* Return true if T can make an abnormal transfer of control flow.
   Transfers of control flow associated with EH are excluded.  */

bool
stmt_can_make_abnormal_goto (gimple *t)
{
  if (computed_goto_p (t))
    return true;
  if (is_gimple_call (t))
    return call_can_make_abnormal_goto (t);
  return false;
}
/* Return true if T represents a stmt that always transfers control.  */

bool
is_ctrl_stmt (gimple *t)
{
  switch (gimple_code (t))
    {
    case GIMPLE_COND:
    case GIMPLE_SWITCH:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      return true;
    default:
      return false;
    }
}
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (gimple *t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      /* Per stmt call flag indicates whether the call could alter
	 control flow.  */
      if (gimple_call_ctrl_altering_p (t))
	return true;
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
/* Return true if T is a simple local goto.  */

bool
simple_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
}
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, a sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
{
  if (stmt == NULL)
    return false;
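  /* For example, for

	 L1:
	 L2:
	   x_1 = ...;

     only L1 opens a new block; L2 is folded into it and counted in
     cfg_stats.num_merged_labels below.  */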
  /* PREV_STMT is only set to a debug stmt if the debug stmt is before
     any nondebug stmts in the block.  We don't want to start another
     block in this case: the debug stmt will already have started the
     one STMT would start if we weren't outputting debug stmts.  */
  if (prev_stmt && is_gimple_debug (prev_stmt))
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
	  || FORCED_LABEL (gimple_label_label (label_stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  if (DECL_NONLOCAL (gimple_label_label (
			       as_a <glabel *> (prev_stmt))))
	    return true;

	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL)
    {
      if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
	/* setjmp acts similar to a nonlocal GOTO target and thus should
	   start a new block.  */
	return true;
      if (gimple_call_internal_p (stmt, IFN_PHI)
	  && prev_stmt
	  && gimple_code (prev_stmt) != GIMPLE_LABEL
	  && (gimple_code (prev_stmt) != GIMPLE_CALL
	      || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
	/* PHI nodes start a new block unless preceded by a label
	   or another PHI.  */
	return true;
    }

  return false;
}
/* Return true if T should end a basic block.  */

bool
stmt_ends_bb_p (gimple *t)
{
  return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
}
/* Remove block annotations and other data structures.  */

void
delete_tree_cfg_annotations (struct function *fn)
{
  vec_free (label_to_block_map_for_fn (fn));
}
/* Return the virtual phi in BB.  */

gphi *
get_virtual_phi (basic_block bb)
{
  for (gphi_iterator gsi = gsi_start_phis (bb);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();

      if (virtual_operand_p (PHI_RESULT (phi)))
	return phi;
    }

  return NULL;
}
/* Return the first statement in basic block BB.  */

gimple *
first_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_start_bb (bb);
  gimple *stmt = NULL;

  while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
    {
      gsi_next (&i);
      stmt = NULL;
    }
  return stmt;
}
/* Return the first non-label statement in basic block BB.  */

static gimple *
first_non_label_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_start_bb (bb);
  while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
    gsi_next (&i);
  return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
}
/* Return the last statement in basic block BB.  */

gimple *
last_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_last_bb (bb);
  gimple *stmt = NULL;

  while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
    {
      gsi_prev (&i);
      stmt = NULL;
    }
  return stmt;
}
/* Return the last statement of an otherwise empty block.  Return NULL
   if the block is totally empty, or if it contains more than one
   statement.  */

gimple *
last_and_only_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
  gimple *last, *prev;

  if (gsi_end_p (i))
    return NULL;

  last = gsi_stmt (i);
  gsi_prev_nondebug (&i);
  if (gsi_end_p (i))
    return last;

  /* Empty statements should no longer appear in the instruction stream.
     Everything that might have appeared before should be deleted by
     remove_useless_stmts, and the optimizers should just gsi_remove
     instead of smashing with build_empty_stmt.

     Thus the only thing that should appear here in a block containing
     one executable statement is a label.  */
  prev = gsi_stmt (i);
  if (gimple_code (prev) == GIMPLE_LABEL)
    return last;
  else
    return NULL;
}
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map *vm;
  int i;
  gphi_iterator phis;

  vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gphi *phi = phis.phi ();
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (old_edge);
}
/* Returns the basic block after which the new basic block created
   by splitting edge EDGE_IN should be placed.  Tries to keep the new block
   near its "logical" location.  This is of most help to humans looking
   at debugging dumps.  */
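/* If the block just before EDGE_IN->dest already has a non-complex edge
   to it, that slot is effectively taken, so the new block is placed
   after the edge's source instead.  */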
static basic_block
split_edge_bb_loc (edge edge_in)
{
  basic_block dest = edge_in->dest;
  basic_block dest_prev = dest->prev_bb;

  if (dest_prev)
    {
      edge e = find_edge (dest_prev, dest);
      if (e && !(e->flags & EDGE_COMPLEX))
	return edge_in->src;
    }
  return dest_prev;
}
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  new_bb->count = edge_in->count ();

  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);

  new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
/* Verify properties of the address expression T with base object BASE.  */

static tree
verify_address (tree t, tree base)
{
  bool old_constant;
  bool old_side_effects;
  bool new_constant;
  bool new_side_effects;

  old_constant = TREE_CONSTANT (t);
  old_side_effects = TREE_SIDE_EFFECTS (t);

  recompute_tree_invariant_for_addr_expr (t);
  new_side_effects = TREE_SIDE_EFFECTS (t);
  new_constant = TREE_CONSTANT (t);

  if (old_constant != new_constant)
    {
      error ("constant not recomputed when ADDR_EXPR changed");
      return t;
    }
  if (old_side_effects != new_side_effects)
    {
      error ("side effects not recomputed when ADDR_EXPR changed");
      return t;
    }

  if (!(VAR_P (base)
	|| TREE_CODE (base) == PARM_DECL
	|| TREE_CODE (base) == RESULT_DECL))
    return NULL_TREE;

  if (DECL_GIMPLE_REG_P (base))
    {
      error ("DECL_GIMPLE_REG_P set on a variable with address taken");
      return base;
    }

  return NULL_TREE;
}
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case PARM_DECL:
    case VAR_DECL:
    case RESULT_DECL:
      {
	tree context = decl_function_context (t);
	if (context != cfun->decl
	    && !SCOPE_FILE_SCOPE_P (context)
	    && !TREE_STATIC (t)
	    && !DECL_EXTERNAL (t))
	  {
	    error ("Local declaration from a different function");
	    return t;
	  }
      }
      break;

    case INDIRECT_REF:
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      if (!poly_int_tree_p (TREE_OPERAND (t, 1))
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR)
	{
	  tree va = verify_address (x, TREE_OPERAND (x, 0));
	  if (va)
	    return va;
	  x = TREE_OPERAND (x, 0);
	}
      walk_tree (&x, verify_expr, data, NULL);
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	if (!(VAR_P (x)
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
	{
	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
	  return t;
	}

      if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  tree t0 = TREE_OPERAND (t, 0);
	  tree t1 = TREE_OPERAND (t, 1);
	  tree t2 = TREE_OPERAND (t, 2);
	  poly_uint64 size, bitpos;
	  if (!poly_int_tree_p (t1, &size)
	      || !poly_int_tree_p (t2, &bitpos)
	      || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
	      || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
	    {
	      error ("invalid position or size operand to BIT_FIELD_REF");
	      return t;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && maybe_ne (TYPE_PRECISION (TREE_TYPE (t)), size))
	    {
	      error ("integral result type precision does not match "
		     "field size of BIT_FIELD_REF");
	      return t;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		   && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t))),
				size))
	    {
	      error ("mode size of non-integral result does not "
		     "match field size of BIT_FIELD_REF");
	      return t;
	    }
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
	      && maybe_gt (size + bitpos,
			   tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (t0)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "BIT_FIELD_REF");
	      return t;
	    }
	}
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF
		   || TREE_CODE (t) == REALPART_EXPR
		   || TREE_CODE (t) == IMAGPART_EXPR)
	    {
	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
		     "REALPART_EXPR");
	      return t;
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      walk_tree (&t, verify_expr, data, NULL);
      *walk_subtrees = 0;
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done
	 using POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_DIFF_EXPR:
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer diff, operand is not a pointer");
	  return t;
	}
      if (TREE_CODE (TREE_TYPE (t)) != INTEGER_TYPE
	  || TYPE_UNSIGNED (TREE_TYPE (t))
	  || (TYPE_PRECISION (TREE_TYPE (t))
	      != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))))
	{
	  error ("invalid type for pointer diff");
	  return t;
	}
      CHECK_OP (0, "invalid operand to pointer diff");
      CHECK_OP (1, "invalid operand to pointer diff");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }

  return NULL;

#undef CHECK_OP
}
/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
   Returns true if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_min_lval (tree expr)
{
  tree op;

  if (is_gimple_id (expr))
    return false;

  if (TREE_CODE (expr) != TARGET_MEM_REF
      && TREE_CODE (expr) != MEM_REF)
    {
      error ("invalid expression for min lvalue");
      return true;
    }

  /* TARGET_MEM_REFs are strange beasts.  */
  if (TREE_CODE (expr) == TARGET_MEM_REF)
    return false;

  op = TREE_OPERAND (expr, 0);
  if (!is_gimple_val (op))
    {
      error ("invalid operand in indirect reference");
      debug_generic_stmt (op);
      return true;
    }
  /* Memory references now generally can involve a value conversion.  */

  return false;
}
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || !poly_int_tree_p (TMR_OFFSET (expr))
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
/* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
   list of pointer-to types that is trivially convertible to DEST.  */

static bool
one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
{
  tree src;

  if (!TYPE_POINTER_TO (src_obj))
    return true;

  for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
    if (useless_type_conversion_p (dest, src))
      return true;

  return false;
}
/* Return true if TYPE1 is a fixed-point type and if conversions to and
   from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */

static bool
valid_fixed_convert_types_p (tree type1, tree type2)
{
  return (FIXED_POINT_TYPE_P (type1)
	  && (INTEGRAL_TYPE_P (type2)
	      || SCALAR_FLOAT_TYPE_P (type2)
	      || FIXED_POINT_TYPE_P (type2)));
}
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gcall *stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
      /* FIXME : for passing label as arg in internal fn PHI from GIMPLE FE*/
      else if (gimple_call_internal_fn (stmt) == IFN_PHI)
	{
	  return false;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && (!is_gimple_lvalue (lhs)
	  || verify_types_in_gimple_reference (lhs, true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  if (gimple_call_ctrl_altering_p (stmt)
      && gimple_call_noreturn_p (stmt)
      && should_remove_lhs_p (lhs))
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && lhs
      && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (lhs))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (lhs));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, the call should either be
     indirect, or the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt)
      && fndecl
      && !DECL_STATIC_CHAIN (fndecl))
    {
      error ("static chain with function that doesn%'t use one");
      return true;
    }

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_UNREACHABLE:
	case BUILT_IN_TRAP:
	  if (gimple_call_num_args (stmt) > 0)
	    {
	      /* Built-in unreachable with parameters might not be caught by
		 undefined behavior sanitizer.  Front-ends do check users do not
		 call them that way but we also produce calls to
		 __builtin_unreachable internally, for example when IPA figures
		 out a call cannot happen in a legal program.  In such cases,
		 we must make sure arguments are stripped off.  */
	      error ("__builtin_unreachable or __builtin_trap call with "
		     "arguments");
	      return true;
	    }
	  break;
	default:
	  break;
	}
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
/* Verifies the gimple comparison with the result type TYPE and
   the operands OP0 and OP1, comparison code is CODE.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
	  || !POINTER_TYPE_P (op1_type)
	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
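  /* That is, either BOOLEAN_TYPE or an integral type whose precision is
     exactly one bit.  */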
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
	  || TYPE_PRECISION (type) == 1))
    {
      if ((TREE_CODE (op0_type) == VECTOR_TYPE
	   || TREE_CODE (op1_type) == VECTOR_TYPE)
	  && code != EQ_EXPR && code != NE_EXPR
	  && !VECTOR_BOOLEAN_TYPE_P (op0_type)
	  && !VECTOR_INTEGER_TYPE_P (op0_type))
	{
	  error ("unsupported operation or type for vector comparison"
		 " returning a boolean");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}
    }
  /* Or a boolean vector type with the same element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
    {
      if (TREE_CODE (op0_type) != VECTOR_TYPE
	  || TREE_CODE (op1_type) != VECTOR_TYPE)
	{
	  error ("non-vector operands in vector comparison");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}

      if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
		    TYPE_VECTOR_SUBPARTS (op0_type)))
	{
	  error ("invalid vector comparison resulting type");
	  debug_generic_expr (type);
	  return true;
	}
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_unary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
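  /* NOP_EXPR and CONVERT_EXPR are both matched by CASE_CONVERT in the
     switch below; the other conversion codes get explicit cases.  */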
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions from pointer type to integral type only if
	   there is no sign or zero extension involved.
	   For targets were the precision of ptrofftype doesn't match that
	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || ptrofftype_p (sizetype))))
	  return false;

	/* Allow conversion from integral to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (INTEGRAL_TYPE_P (lhs_type)
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
	  || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
	      == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
	{
	  error ("invalid types in address space conversion");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}

      return false;

    case FIXED_CONVERT_EXPR:
      if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	  && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	{
	  error ("invalid types in fixed-point conversion");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}

      return false;

    case FLOAT_EXPR:
      if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	  && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
	      || !VECTOR_FLOAT_TYPE_P (lhs_type)))
	{
	  error ("invalid types in conversion to floating point");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}

      return false;

    case FIX_TRUNC_EXPR:
      if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	  && (!VECTOR_INTEGER_TYPE_P (lhs_type)
	      || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
	{
	  error ("invalid types in conversion to integer");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}

      return false;

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case CONJ_EXPR:
      break;

    case VEC_DUPLICATE_EXPR:
      if (TREE_CODE (lhs_type) != VECTOR_TYPE
	  || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
	{
	  error ("vec_duplicate should be from a scalar to a like vector");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}
      return false;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_binary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
		 || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
		 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in complex expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case WIDEN_LSHIFT_EXPR:
      {
	if (!INTEGRAL_TYPE_P (lhs_type)
	    || !INTEGRAL_TYPE_P (rhs1_type)
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
	  {
	    error ("type mismatch in widening vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      {
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
		> TYPE_PRECISION (TREE_TYPE (lhs_type))))
	  {
	    error ("type mismatch in widening vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	tree lhs_etype = lhs_type;
	tree rhs1_etype = rhs1_type;
	tree rhs2_etype = rhs2_type;
	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to vector valued plus");
		return true;
	      }
	    lhs_etype = TREE_TYPE (lhs_type);
	    rhs1_etype = TREE_TYPE (rhs1_type);
	    rhs2_etype = TREE_TYPE (rhs2_type);
	  }
	if (POINTER_TYPE_P (lhs_etype)
	    || POINTER_TYPE_P (rhs1_etype)
	    || POINTER_TYPE_P (rhs2_etype))
	  {
	    error ("invalid (pointer) operands to plus/minus");
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !ptrofftype_p (rhs2_type))
	  {
	    error ("type mismatch in pointer plus expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case POINTER_DIFF_EXPR:
      {
	if (!POINTER_TYPE_P (rhs1_type)
	    || !POINTER_TYPE_P (rhs2_type)
	    /* Because we special-case pointers to void we allow difference
	       of arbitrary pointers with the same mode.  */
	    || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
	    || TREE_CODE (lhs_type) != INTEGER_TYPE
	    || TYPE_UNSIGNED (lhs_type)
	    || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
	  {
	    error ("type mismatch in pointer diff expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:

      gcc_unreachable ();

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);

    case WIDEN_MULT_EXPR:
      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
	return true;
      return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));

    case WIDEN_SUM_EXPR:
      {
	if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
	      || TREE_CODE (lhs_type) != VECTOR_TYPE)
	     && ((!INTEGRAL_TYPE_P (rhs1_type)
		  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
		 || (!INTEGRAL_TYPE_P (lhs_type)
		     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
	    || !useless_type_conversion_p (lhs_type, rhs2_type)
	    || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
	  {
	    error ("type mismatch in widening sum reduction");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	return false;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !types_compatible_p (rhs1_type, rhs2_type)
	    || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
	  {
	    error ("type mismatch in vector widening multiplication");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	return false;
      }

    case VEC_PACK_TRUNC_EXPR:
      /* ???  We currently use VEC_PACK_TRUNC_EXPR to simply concat
	 vector boolean types.  */
      if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
	  && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
	  && types_compatible_p (rhs1_type, rhs2_type)
	  && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
		       2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
	return false;

      /* Fallthru.  */
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
		  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
		  && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
		 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
	    || !types_compatible_p (rhs1_type, rhs2_type)
	    || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
			 2 * GET_MODE_SIZE (element_mode (lhs_type))))
	  {
	    error ("type mismatch in vector pack expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    case VEC_SERIES_EXPR:
      if (!useless_type_conversion_p (rhs1_type, rhs2_type))
	{
	  error ("type mismatch in series expression");
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  return true;
	}
      if (TREE_CODE (lhs_type) != VECTOR_TYPE
	  || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
	{
	  error ("vector type expected in series expression");
	  debug_generic_expr (lhs_type);
	  return true;
	}
      return false;

    default:
      gcc_unreachable ();
    }

  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
/* Verify a gimple assignment statement STMT with a ternary rhs.
   Returns true if anything is wrong.  */
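/* The ternary rhs codes handled below are the conditional forms
   (COND_EXPR, VEC_COND_EXPR), VEC_PERM_EXPR, the widening
   multiply-accumulate codes, FMA_EXPR, DOT_PROD_EXPR, SAD_EXPR,
   BIT_INSERT_EXPR and REALIGN_LOAD_EXPR.  */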
static bool
verify_gimple_assign_ternary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);
  tree rhs3 = gimple_assign_rhs3 (stmt);
  tree rhs3_type = TREE_TYPE (rhs3);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of ternary operation");
      return true;
    }

  if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
       ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
      || !is_gimple_val (rhs2)
      || !is_gimple_val (rhs3))
    {
      error ("invalid operands in ternary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      if ((!INTEGRAL_TYPE_P (rhs1_type)
	   && !FIXED_POINT_TYPE_P (rhs1_type))
	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type)
	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
	{
	  error ("type mismatch in widening multiply-accumulate expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case FMA_EXPR:
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in fused multiply-add expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case VEC_COND_EXPR:
      if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
		       TYPE_VECTOR_SUBPARTS (lhs_type)))
	{
	  error ("the first argument of a VEC_COND_EXPR must be of a "
		 "boolean vector type of the same number of elements "
		 "as the result");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}
      /* Fallthrough.  */
    case COND_EXPR:
      if (!useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in conditional expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case VEC_PERM_EXPR:
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type))
	{
	  error ("type mismatch in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
	{
	  error ("vector types expected in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
		    TYPE_VECTOR_SUBPARTS (rhs2_type))
	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
		       TYPE_VECTOR_SUBPARTS (rhs3_type))
	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
		       TYPE_VECTOR_SUBPARTS (lhs_type)))
	{
	  error ("vectors with different element number found "
		 "in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
	  || (TREE_CODE (rhs3) != VECTOR_CST
	      && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
				    (TREE_TYPE (rhs3_type)))
		  != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
				       (TREE_TYPE (rhs1_type))))))
	{
	  error ("invalid mask type in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      return false;

    case SAD_EXPR:
      if (!useless_type_conversion_p (rhs1_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type)
	  || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
	       > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
	{
	  error ("type mismatch in sad expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
	{
	  error ("vector types expected in sad expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      return false;

    case BIT_INSERT_EXPR:
      if (! useless_type_conversion_p (lhs_type, rhs1_type))
	{
	  error ("type mismatch in BIT_INSERT_EXPR");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}
      if (! ((INTEGRAL_TYPE_P (rhs1_type)
	      && INTEGRAL_TYPE_P (rhs2_type))
	     || (VECTOR_TYPE_P (rhs1_type)
		 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
	{
	  error ("not allowed type combination in BIT_INSERT_EXPR");
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  return true;
	}
      if (! tree_fits_uhwi_p (rhs3)
	  || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
	  || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
	{
	  error ("invalid position or size in BIT_INSERT_EXPR");
	  return true;
	}
      if (INTEGRAL_TYPE_P (rhs1_type))
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
	  if (bitpos >= TYPE_PRECISION (rhs1_type)
	      || (bitpos + TYPE_PRECISION (rhs2_type)
		  > TYPE_PRECISION (rhs1_type)))
	    {
	      error ("insertion out of range in BIT_INSERT_EXPR");
	      return true;
	    }
	}
      else if (VECTOR_TYPE_P (rhs1_type))
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
	  if (bitpos % bitsize != 0)
	    {
	      error ("vector insertion not at element boundary");
	      return true;
	    }
	}
      return false;

    case DOT_PROD_EXPR:
      {
	if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
	      || TREE_CODE (lhs_type) != VECTOR_TYPE)
	     && ((!INTEGRAL_TYPE_P (rhs1_type)
		  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
		 || (!INTEGRAL_TYPE_P (lhs_type)
		     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
	    || !types_compatible_p (rhs1_type, rhs2_type)
	    || !useless_type_conversion_p (lhs_type, rhs3_type)
	    || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
	  {
	    error ("type mismatch in dot product reduction");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	return false;
      }

    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return false;

    default:
      gcc_unreachable ();
    }
  return false;
}
/* Verify a gimple assignment statement STMT with a single rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_single (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  bool res = false;

  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion at assignment");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  if (gimple_clobber_p (stmt)
      && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
    {
      error ("non-decl/MEM_REF LHS in clobber statement");
      debug_generic_expr (lhs);
      return true;
    }

  if (handled_component_p (lhs)
      || TREE_CODE (lhs) == MEM_REF
      || TREE_CODE (lhs) == TARGET_MEM_REF)
    res |= verify_types_in_gimple_reference (lhs, true);

  /* Special codes we cannot handle via their class.  */
  switch (rhs_code)
    {
    case ADDR_EXPR:
      {
	tree op = TREE_OPERAND (rhs1, 0);
	if (!is_gimple_addressable (op))
	  {
	    error ("invalid operand in unary expression");
	    return true;
	  }

	/* Technically there is no longer a need for matching types, but
	   gimple hygiene asks for this check.  In LTO we can end up
	   combining incompatible units and thus end up with addresses
	   of globals that change their type to a common one.  */
	if (!in_lto_p
	    && !types_compatible_p (TREE_TYPE (op),
				    TREE_TYPE (TREE_TYPE (rhs1)))
	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
							  TREE_TYPE (op)))
	  {
	    error ("type mismatch in address expression");
	    debug_generic_stmt (TREE_TYPE (rhs1));
	    debug_generic_stmt (TREE_TYPE (op));
	    return true;
	  }

	return verify_types_in_gimple_reference (op, true);
      }
4624 error ("INDIRECT_REF in gimple IL");
4630 case ARRAY_RANGE_REF
:
4631 case VIEW_CONVERT_EXPR
:
4634 case TARGET_MEM_REF
:
4636 if (!is_gimple_reg (lhs
)
4637 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4639 error ("invalid rhs for gimple memory store");
4640 debug_generic_stmt (lhs
);
4641 debug_generic_stmt (rhs1
);
4644 return res
|| verify_types_in_gimple_reference (rhs1
, false);
    /* tcc_declaration  */
    case CONST_DECL:
      return res;
    case VAR_DECL:
    case PARM_DECL:
      if (!is_gimple_reg (lhs)
	  && !is_gimple_reg (rhs1)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;
    case CONSTRUCTOR:
      if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
	{
	  unsigned int i;
	  tree elt_i, elt_v, elt_t = NULL_TREE;

	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
	    return res;
	  /* For vector CONSTRUCTORs we require that either it is empty
	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
	     (then the element count must be correct to cover the whole
	     outer vector and index must be NULL on all elements, or it is
	     a CONSTRUCTOR of scalar elements, where we as an exception allow
	     smaller number of elements (assuming zero filling) and
	     consecutive indexes as compared to NULL indexes (such
	     CONSTRUCTORs can appear in the IL from FEs).  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
	    {
	      if (elt_t == NULL_TREE)
		{
		  elt_t = TREE_TYPE (elt_v);
		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
		    {
		      tree elt_t = TREE_TYPE (elt_v);
		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
						      TREE_TYPE (elt_t)))
			{
			  error ("incorrect type of vector CONSTRUCTOR"
				 " elements");
			  debug_generic_stmt (rhs1);
			  return true;
			}
		      else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
					 * TYPE_VECTOR_SUBPARTS (elt_t),
					 TYPE_VECTOR_SUBPARTS (rhs1_type)))
			{
			  error ("incorrect number of vector CONSTRUCTOR"
				 " elements");
			  debug_generic_stmt (rhs1);
			  return true;
			}
		    }
		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
						       elt_t))
		    {
		      error ("incorrect type of vector CONSTRUCTOR elements");
		      debug_generic_stmt (rhs1);
		      return true;
		    }
		  else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
				     TYPE_VECTOR_SUBPARTS (rhs1_type)))
		    {
		      error ("incorrect number of vector CONSTRUCTOR"
			     " elements");
		      debug_generic_stmt (rhs1);
		      return true;
		    }
		}
	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
		{
		  error ("incorrect type of vector CONSTRUCTOR elements");
		  debug_generic_stmt (rhs1);
		  return true;
		}
	      if (elt_i != NULL_TREE
		  && (TREE_CODE (elt_t) == VECTOR_TYPE
		      || TREE_CODE (elt_i) != INTEGER_CST
		      || compare_tree_int (elt_i, i) != 0))
		{
		  error ("vector CONSTRUCTOR with non-NULL element index");
		  debug_generic_stmt (rhs1);
		  return true;
		}
	      if (!is_gimple_val (elt_v))
		{
		  error ("vector CONSTRUCTOR element is not a GIMPLE value");
		  debug_generic_stmt (rhs1);
		  return true;
		}
	    }
	}
      else if (CONSTRUCTOR_NELTS (rhs1) != 0)
	{
	  error ("non-vector CONSTRUCTOR with elements");
	  debug_generic_stmt (rhs1);
	  return true;
	}

      return res;
    case WITH_SIZE_EXPR:
      /* FIXME.  */
      return res;

    default:;
    }

  return res;
}

/* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_assign (gassign *stmt)
{
  switch (gimple_assign_rhs_class (stmt))
    {
    case GIMPLE_SINGLE_RHS:
      return verify_gimple_assign_single (stmt);

    case GIMPLE_UNARY_RHS:
      return verify_gimple_assign_unary (stmt);

    case GIMPLE_BINARY_RHS:
      return verify_gimple_assign_binary (stmt);

    case GIMPLE_TERNARY_RHS:
      return verify_gimple_assign_ternary (stmt);

    default:
      gcc_unreachable ();
    }
}
/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_return (greturn *stmt)
{
  tree op = gimple_return_retval (stmt);
  tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));

  /* We cannot test for present return values as we do not fix up missing
     return values from the original source.  */
  if (op == NULL)
    return false;

  if (!is_gimple_val (op)
      && TREE_CODE (op) != RESULT_DECL)
    {
      error ("invalid operand in return statement");
      debug_generic_stmt (op);
      return true;
    }

  if ((TREE_CODE (op) == RESULT_DECL
       && DECL_BY_REFERENCE (op))
      || (TREE_CODE (op) == SSA_NAME
	  && SSA_NAME_VAR (op)
	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
    op = TREE_TYPE (op);

  if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
    {
      error ("invalid conversion in return statement");
      debug_generic_stmt (restype);
      debug_generic_stmt (TREE_TYPE (op));
      return true;
    }

  return false;
}
/* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_goto (ggoto *stmt)
{
  tree dest = gimple_goto_dest (stmt);

  /* ??? We have two canonical forms of direct goto destinations, a
     bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
  if (TREE_CODE (dest) != LABEL_DECL
      && (!is_gimple_val (dest)
	  || !POINTER_TYPE_P (TREE_TYPE (dest))))
    {
      error ("goto destination is neither a label nor a pointer");
      return true;
    }

  return false;
}
/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_switch (gswitch *stmt)
{
  unsigned int i, n;
  tree elt, prev_upper_bound = NULL_TREE;
  tree index_type, elt_type = NULL_TREE;

  if (!is_gimple_val (gimple_switch_index (stmt)))
    {
      error ("invalid operand to switch statement");
      debug_generic_stmt (gimple_switch_index (stmt));
      return true;
    }

  index_type = TREE_TYPE (gimple_switch_index (stmt));
  if (! INTEGRAL_TYPE_P (index_type))
    {
      error ("non-integral type switch statement");
      debug_generic_expr (index_type);
      return true;
    }

  elt = gimple_switch_label (stmt, 0);
  if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
    {
      error ("invalid default case label in switch statement");
      debug_generic_expr (elt);
      return true;
    }

  n = gimple_switch_num_labels (stmt);
  for (i = 1; i < n; i++)
    {
      elt = gimple_switch_label (stmt, i);

      if (! CASE_LOW (elt))
	{
	  error ("invalid case label in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}
      if (CASE_HIGH (elt)
	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
	{
	  error ("invalid case range in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}

      if (elt_type)
	{
	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
	      || (CASE_HIGH (elt)
		  && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
	    {
	      error ("type mismatch for case label in switch statement");
	      debug_generic_expr (elt);
	      return true;
	    }
	}
      else
	{
	  elt_type = TREE_TYPE (CASE_LOW (elt));
	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
	    {
	      error ("type precision mismatch in switch statement");
	      return true;
	    }
	}

      if (prev_upper_bound)
	{
	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
	    {
	      error ("case labels not sorted in switch statement");
	      return true;
	    }
	}

      prev_upper_bound = CASE_HIGH (elt);
      if (! prev_upper_bound)
	prev_upper_bound = CASE_LOW (elt);
    }

  return false;
}
/* Verify a gimple debug statement STMT.
   Returns true if anything is wrong.  */

static bool
verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
{
  /* There isn't much that could be wrong in a gimple debug stmt.  A
     gimple debug bind stmt, for example, maps a tree, that's usually
     a VAR_DECL or a PARM_DECL, but that could also be some scalarized
     component or member of an aggregate type, to another tree, that
     can be an arbitrary expression.  These stmts expand into debug
     insns, and are converted to debug notes by var-tracking.c.  */
  return false;
}
/* Verify a gimple label statement STMT.
   Returns true if anything is wrong.  */

static bool
verify_gimple_label (glabel *stmt)
{
  tree decl = gimple_label_label (stmt);
  int uid;
  bool err = false;

  if (TREE_CODE (decl) != LABEL_DECL)
    return true;
  if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
      && DECL_CONTEXT (decl) != current_function_decl)
    {
      error ("label's context is not the current function decl");
      err |= true;
    }

  uid = LABEL_DECL_UID (decl);
  if (cfun->cfg
      && (uid == -1
	  || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
    {
      error ("incorrect entry in label_to_block_map");
      err |= true;
    }

  uid = EH_LANDING_PAD_NR (decl);
  if (uid)
    {
      eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
      if (decl != lp->post_landing_pad)
	{
	  error ("incorrect setting of landing pad number");
	  err |= true;
	}
    }

  return err;
}
/* Verify a gimple cond statement STMT.
   Returns true if anything is wrong.  */

static bool
verify_gimple_cond (gcond *stmt)
{
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    {
      error ("invalid comparison code in gimple cond");
      return true;
    }
  if (!(!gimple_cond_true_label (stmt)
	|| TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
      || !(!gimple_cond_false_label (stmt)
	   || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
    {
      error ("invalid labels in gimple cond");
      return true;
    }

  return verify_gimple_comparison (boolean_type_node,
				   gimple_cond_lhs (stmt),
				   gimple_cond_rhs (stmt),
				   gimple_cond_code (stmt));
}
/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  */

static bool
verify_gimple_stmt (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (as_a <gassign *> (stmt));

    case GIMPLE_LABEL:
      return verify_gimple_label (as_a <glabel *> (stmt));

    case GIMPLE_CALL:
      return verify_gimple_call (as_a <gcall *> (stmt));

    case GIMPLE_COND:
      return verify_gimple_cond (as_a <gcond *> (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (as_a <ggoto *> (stmt));

    case GIMPLE_SWITCH:
      return verify_gimple_switch (as_a <gswitch *> (stmt));

    case GIMPLE_RETURN:
      return verify_gimple_return (as_a <greturn *> (stmt));

    case GIMPLE_ASM:
      return false;

    case GIMPLE_TRANSACTION:
      return verify_gimple_transaction (as_a <gtransaction *> (stmt));

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_EH_MUST_NOT_THROW:
      return false;

    CASE_GIMPLE_OMP:
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;

    case GIMPLE_DEBUG:
      return verify_gimple_debug (stmt);

    default:
      gcc_unreachable ();
    }
}
/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
   and false otherwise.  */

static bool
verify_gimple_phi (gimple *phi)
{
  bool err = false;
  unsigned i;
  tree phi_result = gimple_phi_result (phi);
  bool virtual_p;

  if (!phi_result)
    {
      error ("invalid PHI result");
      return true;
    }

  virtual_p = virtual_operand_p (phi_result);
  if (TREE_CODE (phi_result) != SSA_NAME
      || (virtual_p
	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
    {
      error ("invalid PHI result");
      err = true;
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree t = gimple_phi_arg_def (phi, i);

      if (!t)
	{
	  error ("missing PHI def");
	  err |= true;
	  continue;
	}
      /* Addressable variables do have SSA_NAMEs but they
	 are not considered gimple values.  */
      else if ((TREE_CODE (t) == SSA_NAME
		&& virtual_p != virtual_operand_p (t))
	       || (virtual_p
		   && (TREE_CODE (t) != SSA_NAME
		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
	       || (!virtual_p
		   && !is_gimple_val (t)))
	{
	  error ("invalid PHI argument");
	  debug_generic_expr (t);
	  err |= true;
	}
#ifdef ENABLE_TYPES_CHECKING
      if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
	{
	  error ("incompatible types in PHI argument %u", i);
	  debug_generic_stmt (TREE_TYPE (phi_result));
	  debug_generic_stmt (TREE_TYPE (t));
	  err |= true;
	}
#endif
    }

  return err;
}
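
/* Note that virtual PHI nodes are checked differently from ordinary
   ones: both the result and every argument must be an SSA name of the
   single virtual operand (gimple_vop), whereas ordinary PHI arguments
   only need to be GIMPLE values of a compatible type.  */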
/* Verify the GIMPLE statements inside the sequence STMTS.  */

static bool
verify_gimple_in_seq_2 (gimple_seq stmts)
{
  gimple_stmt_iterator ittr;
  bool err = false;

  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
    {
      gimple *stmt = gsi_stmt (ittr);

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  err |= verify_gimple_in_seq_2 (
		   gimple_bind_body (as_a <gbind *> (stmt)));
	  break;

	case GIMPLE_TRY:
	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
	  break;

	case GIMPLE_EH_FILTER:
	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
	  break;

	case GIMPLE_EH_ELSE:
	  {
	    geh_else *eh_else = as_a <geh_else *> (stmt);
	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
	  }
	  break;

	case GIMPLE_CATCH:
	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
					   as_a <gcatch *> (stmt)));
	  break;

	case GIMPLE_TRANSACTION:
	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
	  break;

	default:
	  {
	    bool err2 = verify_gimple_stmt (stmt);
	    if (err2)
	      debug_gimple_stmt (stmt);
	    err |= err2;
	  }
	}
    }

  return err;
}
/* Verify the contents of a GIMPLE_TRANSACTION.  Returns true if there
   is a problem, otherwise false.  */

static bool
verify_gimple_transaction (gtransaction *stmt)
{
  tree lab;

  lab = gimple_transaction_label_norm (stmt);
  if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
    return true;
  lab = gimple_transaction_label_uninst (stmt);
  if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
    return true;
  lab = gimple_transaction_label_over (stmt);
  if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
    return true;

  return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
}
/* Verify the GIMPLE statements inside the statement list STMTS.  */

DEBUG_FUNCTION void
verify_gimple_in_seq (gimple_seq stmts)
{
  timevar_push (TV_TREE_STMT_VERIFY);
  if (verify_gimple_in_seq_2 (stmts))
    internal_error ("verify_gimple failed");
  timevar_pop (TV_TREE_STMT_VERIFY);
}
/* Return true when the T can be shared.  */

static bool
tree_node_can_be_shared (tree t)
{
  if (IS_TYPE_OR_DECL_P (t)
      || is_gimple_min_invariant (t)
      || TREE_CODE (t) == SSA_NAME
      || t == error_mark_node
      || TREE_CODE (t) == IDENTIFIER_NODE)
    return true;

  if (TREE_CODE (t) == CASE_LABEL_EXPR)
    return true;

  return false;
}
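
/* Everything else -- in particular expression trees -- must be
   referenced by exactly one statement; verify_node_sharing_1 below
   flags any tree node that the walk reaches twice.  */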
/* Called via walk_tree.  Verify tree sharing.  */

static tree
verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
{
  hash_set<void *> *visited = (hash_set<void *> *) data;

  if (tree_node_can_be_shared (*tp))
    {
      *walk_subtrees = false;
      return NULL;
    }

  if (visited->add (*tp))
    return *tp;

  return NULL;
}

/* Called via walk_gimple_stmt.  Verify tree sharing.  */

static tree
verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
}
static bool eh_error_found;
bool
verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
			   hash_set<gimple *> *visited)
{
  if (!visited->contains (stmt))
    {
      error ("dead STMT in EH table");
      debug_gimple_stmt (stmt);
      eh_error_found = true;
    }
  return true;
}
/* Verify if the location LOCs block is in BLOCKS.  */

static bool
verify_location (hash_set<tree> *blocks, location_t loc)
{
  tree block = LOCATION_BLOCK (loc);
  if (block != NULL_TREE
      && !blocks->contains (block))
    {
      error ("location references block not in block tree");
      return true;
    }
  if (block != NULL_TREE)
    return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
  return false;
}
/* Called via walk_tree.  Verify that expressions have no blocks.  */

static tree
verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
{
  if (!EXPR_P (*tp))
    {
      *walk_subtrees = false;
      return NULL;
    }

  location_t loc = EXPR_LOCATION (*tp);
  if (LOCATION_BLOCK (loc) != NULL)
    return *tp;

  return NULL;
}
/* Called via walk_tree.  Verify locations of expressions.  */

static tree
verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
{
  hash_set<tree> *blocks = (hash_set<tree> *) data;

  if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
    {
      tree t = DECL_DEBUG_EXPR (*tp);
      tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }
  if ((VAR_P (*tp)
       || TREE_CODE (*tp) == PARM_DECL
       || TREE_CODE (*tp) == RESULT_DECL)
      && DECL_HAS_VALUE_EXPR_P (*tp))
    {
      tree t = DECL_VALUE_EXPR (*tp);
      tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }

  if (!EXPR_P (*tp))
    {
      *walk_subtrees = false;
      return NULL;
    }

  location_t loc = EXPR_LOCATION (*tp);
  if (verify_location (blocks, loc))
    return *tp;

  return NULL;
}

/* Called via walk_gimple_op.  Verify locations of expressions.  */

static tree
verify_expr_location (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  return verify_expr_location_1 (tp, walk_subtrees, wi->info);
}
/* Insert all subblocks of BLOCK into BLOCKS and recurse.  */

static void
collect_subblocks (hash_set<tree> *blocks, tree block)
{
  tree t;
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    {
      blocks->add (t);
      collect_subblocks (blocks, t);
    }
}
/* Verify the GIMPLE statements in the CFG of FN.  */

DEBUG_FUNCTION void
verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
{
  basic_block bb;
  bool err = false;

  timevar_push (TV_TREE_STMT_VERIFY);
  hash_set<void *> visited;
  hash_set<gimple *> visited_stmts;

  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
  hash_set<tree> blocks;
  if (DECL_INITIAL (fn->decl))
    {
      blocks.add (DECL_INITIAL (fn->decl));
      collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
    }

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;

      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  gphi *phi = gpi.phi ();
	  bool err2 = false;
	  unsigned i;

	  visited_stmts.add (phi);

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Only PHI arguments have locations.  */
	  if (gimple_location (phi) != UNKNOWN_LOCATION)
	    {
	      error ("PHI node with location");
	      err2 = true;
	    }

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      tree addr = walk_tree (&arg, verify_node_sharing_1,
				     &visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	      location_t loc = gimple_phi_arg_location (phi, i);
	      if (virtual_operand_p (gimple_phi_result (phi))
		  && loc != UNKNOWN_LOCATION)
		{
		  error ("virtual PHI with argument locations");
		  err2 = true;
		}
	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  err2 = true;
		}
	      err2 |= verify_location (&blocks, loc);
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  visited_stmts.add (stmt);

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);
	  err2 |= verify_location (&blocks, gimple_location (stmt));

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &blocks;
	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
	  if (addr)
	    {
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* ??? Instead of not checking these stmts at all the walker
	     should know its context via wi.  */
	  if (!is_gimple_debug (stmt)
	      && !is_gimple_omp (stmt))
	    {
	      memset (&wi, 0, sizeof (wi));
	      addr = walk_gimple_op (stmt, verify_expr, &wi);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  inform (gimple_location (stmt), "in statement");
		  err2 |= true;
		}
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr > 0)
	    {
	      if (!stmt_could_throw_p (stmt))
		{
		  if (verify_nothrow)
		    {
		      error ("statement marked for throw, but doesn%'t");
		      err2 |= true;
		    }
		}
	      else if (!gsi_one_before_end_p (gsi))
		{
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}
    }

  eh_error_found = false;
  hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
  if (eh_table)
    eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
      (&visited_stmts);

  if (err || eh_error_found)
    internal_error ("verify_gimple failed");

  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
/* Verifies that the flow information is OK.  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  edge e;
  edge_iterator ei;

  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB_FN (bb, cfun)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple *prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (as_a <glabel *> (stmt));
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label ");
	      print_generic_expr (stderr, label);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (label_to_block (label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (label_stmt));
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      gsi = gsi_last_nondebug_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* fallthru */
	case GIMPLE_RETURN:
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (switch_stmt);

	    /* Mark all the destination basic blocks.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
		basic_block label_bb = label_to_block (lab);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (switch_stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (switch_stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev);
		    fprintf (stderr, " is greater than ");
		    print_generic_expr (stderr, c);
		    fprintf (stderr, " but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gphi_iterator gsi;

  dummy = fallthru->src;
  bb = fallthru->dest;

  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi, *new_phi;

      phi = gsi.phi ();
      var = gimple_phi_result (phi);
      new_phi = create_phi_node (var, bb);
      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
/* Return a non-special label in the head of basic block BLOCK.
   Create one if it doesn't exist.  */

tree
gimple_block_label (basic_block bb)
{
  gimple_stmt_iterator i, s = gsi_start_bb (bb);
  bool first = true;
  tree label;
  glabel *stmt;

  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
    {
      stmt = dyn_cast <glabel *> (gsi_stmt (i));
      if (!stmt)
	break;
      label = gimple_label_label (stmt);
      if (!DECL_NONLOCAL (label))
	{
	  if (!first)
	    gsi_move_before (&i, &s);
	  return label;
	}
    }

  label = create_artificial_label (UNKNOWN_LOCATION);
  stmt = gimple_build_label (label);
  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
  return label;
}
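
/* As a side effect, a reusable label found later in the sequence is
   moved to the front of the block so that subsequent calls find it
   immediately; DECL_NONLOCAL labels are never reused.  */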
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by a goto or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static edge
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
  basic_block src = e->src;
  gimple_stmt_iterator i;
  gimple *stmt;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  */
  if (EDGE_COUNT (src->succs) != 2
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
    return NULL;

  i = gsi_last_bb (src);
  if (gsi_end_p (i))
    return NULL;

  stmt = gsi_stmt (i);

  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gsi_remove (&i, true);
      e = ssa_redirect_edge (e, target);
      e->flags = EDGE_FALLTHRU;
      return e;
    }

  return NULL;
}
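
/* Note the EDGE_SUCC (src, EDGE_SUCC (src, 0) == e) idiom above: the
   comparison yields index 1 exactly when E is successor 0, so it
   always selects the successor edge other than E.  */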
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple *stmt;

  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  if (e->dest == dest)
    return NULL;

  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
    {
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  gsi = gsi_last_nondebug_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	tree label = gimple_block_label (dest);
	tree cases = get_cases_for_edge (e, switch_stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, switch_stmt);

		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (switch_stmt, i);
		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	int i, n = gimple_asm_nlabels (asm_stmt);
	tree label = NULL_TREE;

	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (asm_stmt, i);
	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
      break;

    case GIMPLE_TRANSACTION:
      if (e->flags & EDGE_TM_ABORT)
	gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
					   gimple_block_label (dest));
      else if (e->flags & EDGE_TM_UNINSTRUMENTED)
	gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
					     gimple_block_label (dest));
      else
	gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
					   gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
/* Returns true if it is possible to remove edge E by redirecting
   it to the destination of the other edge from E->src.  */

static bool
gimple_can_remove_branch_p (const_edge e)
{
  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
    return false;

  return true;
}

/* Simple wrapper, as we can always redirect fallthru edges.  */

static basic_block
gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  e = gimple_redirect_edge_and_branch (e, dest);
  gcc_assert (e);

  return NULL;
}
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Get a stmt iterator pointing to the first stmt to move.  */
  if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
    gsi = gsi_after_labels (bb);
  else
    {
      gsi = gsi_for_stmt ((gimple *) stmt);
      gsi_next (&gsi);
    }

  /* Move everything from GSI to the new basic block.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
/* Moves basic block BB after block AFTER.  */

static bool
gimple_move_block_after (basic_block bb, basic_block after)
{
  if (bb->prev_bb == after)
    return true;

  unlink_block (bb);
  link_block (bb, after);

  return true;
}
/* Return TRUE if block BB has no executable statements, otherwise return
   FALSE.  */

static bool
gimple_empty_block_p (basic_block bb)
{
  /* BB must have no executable statements.  */
  gimple_stmt_iterator gsi = gsi_after_labels (bb);
  if (phi_nodes (bb))
    return false;
  if (gsi_end_p (gsi))
    return true;
  if (is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);
  return gsi_end_p (gsi);
}
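
/* Labels and debug statements do not count as executable here, so a
   block containing only labels and debug binds is considered empty.  */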
/* Split a basic block if it ends with a conditional branch and if the
   other part of the block is not empty.  */

static basic_block
gimple_split_block_before_cond_jump (basic_block bb)
{
  gimple *last, *split_point;
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return NULL;
  last = gsi_stmt (gsi);
  if (gimple_code (last) != GIMPLE_COND
      && gimple_code (last) != GIMPLE_SWITCH)
    return NULL;
  gsi_prev (&gsi);
  split_point = gsi_stmt (gsi);
  return split_block (bb, split_point)->dest;
}


/* Return true if basic_block can be duplicated.  */

static bool
gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
{
  return true;
}
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi_tgt;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      gphi *copy = create_phi_node (NULL_TREE, new_bb);
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;
      gimple *stmt, *copy;

      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
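
/* The duplicate leaves the SSA web in a transitional state: every
   definition in the copy receives a fresh SSA name via
   create_new_def_for, but PHI arguments are only filled in once the
   incoming edges exist (see add_phi_args_after_copy below).  */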
/* Adds phi node arguments for edge E_COPY after basic block duplication.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gphi *phi, *phi_copy;
  tree def;
  gphi_iterator psi, psi_copy;

  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = psi.phi ();
      phi_copy = psi_copy.phi ();
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
/* Basic block BB_COPY was created by code duplication.  Add phi node
   arguments for edges going out of BB_COPY.  The blocks that were
   duplicated have BB_DUPLICATED set.  */

void
add_phi_args_after_copy_bb (basic_block bb_copy)
{
  edge e_copy;
  edge_iterator ei;

  FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
    {
      add_phi_args_after_copy_edge (e_copy);
    }
}

/* Blocks in REGION_COPY array of length N_REGION were created by
   duplication of basic blocks.  Add phi node arguments for edges
   going from these blocks.  If E_COPY is not NULL, also add
   phi node arguments for its destination.  */

void
add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
			 edge e_copy)
{
  unsigned i;

  for (i = 0; i < n_region; i++)
    region_copy[i]->flags |= BB_DUPLICATED;

  for (i = 0; i < n_region; i++)
    add_phi_args_after_copy_bb (region_copy[i]);
  if (e_copy)
    add_phi_args_after_copy_edge (e_copy);

  for (i = 0; i < n_region; i++)
    region_copy[i]->flags &= ~BB_DUPLICATED;
}
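
/* The BB_DUPLICATED flag is the protocol tying these helpers
   together: it is set on all copies up front so that
   add_phi_args_after_copy_edge can map a copied block back to its
   original via get_bb_original, and cleared again when done.  */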
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms = vNULL;
  edge redirected;
  profile_count total_count = profile_count::uninitialized ();
  profile_count entry_count = profile_count::uninitialized ();

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     missuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  initialize_original_copy_tables ();

  if (copying_header)
    set_loop_copy (loop, loop_outer (loop));
  else
    set_loop_copy (loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  if (entry->dest->count.initialized_p ())
    {
      total_count = entry->dest->count;
      entry_count = entry->count ();
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);
  if (total_count.initialized_p () && entry_count.initialized_p ())
    {
      scale_bbs_frequencies_profile_count (region, n_region,
					   total_count - entry_count,
					   total_count);
      scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
					   total_count);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
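
/* The primary use of this function is loop header copying: REGION is
   then the loop header, ENTRY the preheader edge, and EXIT the edge
   leaving the copied condition.  Afterwards the copy guards entry to
   the loop while EXIT->dest and EXIT->src become the new header and
   latch of the original loop.  */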
/* Checks if BB is part of the region defined by N_REGION BBS.  */

static bool
bb_part_of_region_p (basic_block bb, basic_block *bbs, unsigned n_region)
{
  unsigned int n;

  for (n = 0; n < n_region; n++)
    {
      if (bb == bbs[n])
	return true;
    }
  return false;
}
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.  */

bool
gimple_duplicate_sese_tail (edge entry, edge exit,
			    basic_block *region, unsigned n_region,
			    basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  vec<basic_block> doms;
  profile_count total_count = profile_count::uninitialized (),
		exit_count = profile_count::uninitialized ();
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple *cond_stmt;
  edge sorig, snew;
  basic_block exit_bb;
  gphi_iterator psi;
  gphi *phi;
  tree def;
  struct loop *target, *aloop, *cloop;

  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  target = loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    {
      if (bb_part_of_region_p (aloop->header, region, n_region))
	{
	  cloop = duplicate_loop (aloop, target);
	  duplicate_subloops (aloop, cloop);
	}
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  total_count = exit->src->count;
  exit_count = exit->count ();
  /* Fix up corner cases, to avoid division by zero or creation of negative
     frequencies.  */
  if (exit_count > total_count)
    exit_count = total_count;

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  if (total_count.initialized_p () && exit_count.initialized_p ())
    {
      scale_bbs_frequencies_profile_count (region, n_region,
					   total_count - exit_count,
					   total_count);
      scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
					   total_count);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);

  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  sorig->probability = exits[1]->probability;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
  snew->probability = exits[0]->probability;

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]),
				      exit_bb);
	PENDING_STMT (e) = NULL;
	for (psi = gsi_start_phis (exit_bb);
	     !gsi_end_p (psi);
	     gsi_next (&psi))
	  {
	    phi = psi.phi ();
	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
	    add_phi_arg (phi, def, e,
			 gimple_phi_arg_location_from_edge (phi, e));
	  }
      }
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();
  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
/* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
   adding blocks when the dominator traversal reaches EXIT.  This
   function silently assumes that ENTRY strictly dominates EXIT.  */

void
gather_blocks_in_sese_region (basic_block entry, basic_block exit,
			      vec<basic_block> *bbs_p)
{
  basic_block son;

  for (son = first_dom_son (CDI_DOMINATORS, entry);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      bbs_p->safe_push (son);
      if (son != exit)
	gather_blocks_in_sese_region (son, exit, bbs_p);
    }
}
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP.  */

static void
replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);

  if (DECL_CONTEXT (t) == to_context)
    return;

  bool existed;
  tree &loc = vars_map->get_or_insert (t, &existed);

  if (!existed)
    {
      if (SSA_VAR_P (t))
	{
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  add_local_decl (f, new_t);
	}
      else
	{
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      loc = new_t;
    }
  else
    new_t = loc;

  *tp = new_t;
}
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones.  */

static tree
replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
		  tree to_context)
{
  tree new_name;

  gcc_assert (!virtual_operand_p (name));

  tree *loc = vars_map->get (name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	}
      else
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      /* Now that we've used the def stmt to define new_name, make sure it
	 doesn't define name anymore.  */
      SSA_NAME_DEF_STMT (name) = NULL;

      vars_map->put (name, new_name);
    }
  else
    new_name = *loc;

  return new_name;
}
struct move_stmt_d
{
  tree orig_block;
  tree new_block;
  tree from_context;
  tree to_context;
  hash_map<tree, tree> *vars_map;
  htab_t new_label_map;
  hash_map<void *, void *> *eh_map;
  bool remap_decls_p;
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == NULL_TREE)
	;
      else if (block == p->orig_block
	       || p->orig_block == NULL_TREE)
	TREE_SET_BLOCK (t, p->new_block);
      else if (flag_checking)
	{
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == PARM_DECL
	       && gimple_in_ssa_p (cfun))
	*tp = *(p->vars_map->get (t));
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  /* For FORCED_LABELs we can end up with references from other
	     functions if some SESE regions are outlined.  It is UB to
	     jump in between them, but they could be used just for printing
	     addresses etc.  In that case, DECL_CONTEXT on the label should
	     be the function containing the glabel stmt with that LABEL_DECL,
	     rather than whatever function a reference to the label was seen
	     last time.  */
	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
	    DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((VAR_P (t) && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Helper for move_stmt_r.  Given an EH region number for the source
   function, map that to the duplicate EH region number in the dest.  */

static int
move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number (old_nr);
  new_r = static_cast<eh_region> (*p->eh_map->get (old_r));

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = move_stmt_eh_region_nr (old_nr, p);

  return build_int_cst (integer_type_node, new_nr);
}
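
/* For example, a call _1 = __builtin_eh_pointer (3) carries its EH
   region number as an INTEGER_CST argument, which must be rewritten
   through EH_MAP to name the duplicated region in the destination
   function; move_stmt_r below performs that rewriting.  */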
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_LABEL:
      {
	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
	   so that such labels can be referenced from other regions.
	   Make sure to update it when seeing a GIMPLE_LABEL though,
	   that is the owner of the label.  */
	walk_gimple_op (stmt, move_stmt_op, wi);
	*handled_ops_p = true;
	tree label = gimple_label_label (as_a <glabel *> (stmt));
	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	  DECL_CONTEXT (label) = p->to_context;
      }
      break;

    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&psi, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      locus = set_block (locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = set_block (e->goto_locus, d->new_block);
      }
}
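/* Worked example (illustrative, not from the original sources): suppose a
   block with index 40 is moved into a destination CFG whose
   x_basic_block_info vector currently holds 20 entries.  Then
   x_last_basic_block becomes 41 and the vector is grown to

     new_len = 41 + (41 + 3) / 4 = 52

   i.e. about 25% slack beyond the required length, so repeated moves do
   not reallocate on every block.  Note that the moved block keeps its old
   index in the destination function's block array.  */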
/* Examine the statements in BB (which is in SRC_CFUN); find and return
   the outermost EH region.  Use REGION as the incoming base EH region.  */

static eh_region
find_outermost_region_in_block (struct function *src_cfun,
				basic_block bb, eh_region region)
{
  gimple_stmt_iterator si;

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      eh_region stmt_region;
      int lp_nr;

      lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
      stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
      if (stmt_region)
	{
	  if (region == NULL)
	    region = stmt_region;
	  else if (stmt_region != region)
	    {
	      region = eh_region_outermost (src_cfun, stmt_region, region);
	      gcc_assert (region != NULL);
	    }
	}
    }

  return region;
}
/* Helper used while duplicating EH regions for an outlined SESE region:
   create an artificial replacement for label DECL and record the mapping
   in the hash table DATA.  Returns the new label.  */

static tree
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = XNEW (struct tree_map);
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label (UNKNOWN_LOCATION);
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
/* Tree walker to replace the decls used inside value expressions by
   duplicates.  */

static tree
replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct replace_decls_d *rd = (struct replace_decls_d *)data;

  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
      break;
    default:
      break;
    }

  if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = false;

  return NULL;
}
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  */

static void
replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
				  tree to_context)
{
  tree *tp, t;

  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      tree x = DECL_VALUE_EXPR (*tp);
	      struct replace_decls_d rd = { vars_map, to_context };

	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
	      SET_DECL_VALUE_EXPR (t, x);
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
/* Fixup the loop arrays and numbers after moving LOOP and its subloops
   from FN1 to FN2.  */

static void
fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
			      struct loop *loop)
{
  /* Discard it from the old loop array.  */
  (*get_loops (fn1))[loop->num] = NULL;

  /* Place it in the new loop array, assigning it a new number.  */
  loop->num = number_of_loops (fn2);
  vec_safe_push (loops_for_fn (fn2)->larray, loop);

  /* Recurse to children.  */
  for (loop = loop->inner; loop; loop = loop->next)
    fixup_loop_arrays_after_move (fn1, fn2, loop);
}
/* Verify that the blocks in BBS_P are a single-entry, single-exit region
   delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks.  */

static void
verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
{
  basic_block bb;
  edge_iterator ei;
  unsigned i;
  bitmap bbs = BITMAP_ALLOC (NULL);

  gcc_assert (entry != NULL);
  gcc_assert (entry != exit);
  gcc_assert (bbs_p != NULL);

  gcc_assert (bbs_p->length () > 0);

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    bitmap_set_bit (bbs, bb->index);

  gcc_assert (bitmap_bit_p (bbs, entry->index));
  gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    {
      if (bb == entry)
	{
	  gcc_assert (single_pred_p (entry));
	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
	}
      else
	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
	  {
	    edge e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
	  }

      if (bb == exit)
	{
	  gcc_assert (single_succ_p (exit));
	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
	}
      else
	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
	  {
	    edge e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
	  }
    }

  BITMAP_FREE (bbs);
}
/* If FROM is an SSA_NAME, mark the version in bitmap DATA.  */

static bool
gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
{
  bitmap release_names = (bitmap)data;

  if (TREE_CODE (from) != SSA_NAME)
    return true;

  bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
  return true;
}
/* Return LOOP_DIST_ALIAS call if present in BB.  */

static gimple *
find_loop_dist_alias (basic_block bb)
{
  gimple *g = last_stmt (bb);
  if (g == NULL || gimple_code (g) != GIMPLE_COND)
    return NULL;

  gimple_stmt_iterator gsi = gsi_for_stmt (g);
  gsi_prev (&gsi);
  if (gsi_end_p (gsi))
    return NULL;

  g = gsi_stmt (gsi);
  if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
    return g;
  return NULL;
}
/* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
   to VALUE and update any immediate uses of its LHS.  */

void
fold_loop_internal_call (gimple *g, tree value)
{
  tree lhs = gimple_call_lhs (g);
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple *use_stmt;
  gimple_stmt_iterator gsi = gsi_for_stmt (g);

  update_call_from_tree (&gsi, value);
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, value);
      update_stmt (use_stmt);
    }
}
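/* Minimal usage sketch (illustrative, not from the original sources):
   committing to the scalar loop of an IFN_LOOP_VECTORIZED guard amounts to

     fold_loop_internal_call (g, boolean_false_node);

   which replaces the call with "false" and rewrites every immediate use of
   its LHS, so CFG cleanup can then remove the dead vectorized copy.  */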
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   function's outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.

   TODO: investigate whether we can reuse gimple_duplicate_sese_region to
   reimplement move_sese_region_to_fn by duplicating the region rather than
   moving it.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
			basic_block exit_bb, tree orig_block)
{
  vec<basic_block> bbs, dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag;
  profile_probability *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges, num_nodes;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  hash_map<void *, void *> *eh_map;
  struct loop *loop = entry_bb->loop_father;
  struct loop *loop0 = get_loop (saved_cfun, 0);
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
	      && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs.create (0);
  bbs.safe_push (entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  if (flag_checking)
    verify_sese (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     bbs.address (),
				     bbs.length ());

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = XNEWVEC (basic_block, num_entry_edges);
  entry_flag = XNEWVEC (int, num_entry_edges);
  entry_prob = XNEWVEC (profile_probability, num_entry_edges);
  i = 0;
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = XNEWVEC (basic_block, num_exit_edges);
      exit_flag = XNEWVEC (int, num_exit_edges);
      exit_prob = XNEWVEC (profile_probability, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_map = NULL;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      eh_region region = NULL;

      FOR_EACH_VEC_ELT (bbs, i, bb)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != NULL)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
					 new_label_mapper, new_label_map);
	}
    }

  /* Initialize an empty loop tree.  */
  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (dest_cfun, loops, 1);
  loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
  set_loops_for_fn (dest_cfun, loops);

  vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();

  /* Move the outlined loop tree part.  */
  num_nodes = bbs.length ();
  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      if (bb->loop_father->header == bb)
	{
	  struct loop *this_loop = bb->loop_father;
	  struct loop *outer = loop_outer (this_loop);
	  if (outer == loop
	      /* If the SESE region contains some bbs ending with
		 a noreturn call, those are considered to belong
		 to the outermost loop in saved_cfun, rather than
		 the entry_bb's loop_father.  */
	      || outer == loop0)
	    {
	      if (outer != loop)
		num_nodes -= this_loop->num_nodes;
	      flow_loop_tree_node_remove (bb->loop_father);
	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
	    }
	}
      else if (bb->loop_father == loop0 && loop0 != loop)
	num_nodes--;

      /* Remove loop exits from the outlined region.  */
      if (loops_for_fn (saved_cfun)->exits)
	FOR_EACH_EDGE (e, ei, bb->succs)
	  {
	    struct loops *l = loops_for_fn (saved_cfun);
	    loop_exit **slot
	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
					       NO_INSERT);
	    if (slot)
	      l->exits->clear_slot (slot);
	  }
    }

  /* Adjust the number of blocks in the tree root of the outlined part.  */
  get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;

  /* Setup a mapping to be used by move_block_to_fn.  */
  loop->aux = current_loops->tree_root;
  loop0->aux = current_loops->tree_root;

  /* Fix up orig_loop_num.  If the block referenced in it has been moved
     to dest_cfun, update orig_loop_num field, otherwise clear it.  */
  struct loop *dloop;
  signed char *moved_orig_loop_num = NULL;
  FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
    if (dloop->orig_loop_num)
      {
	if (moved_orig_loop_num == NULL)
	  moved_orig_loop_num
	    = XCNEWVEC (signed char, vec_safe_length (larray));
	if ((*larray)[dloop->orig_loop_num] != NULL
	    && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
	  {
	    if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
		&& moved_orig_loop_num[dloop->orig_loop_num] < 2)
	      moved_orig_loop_num[dloop->orig_loop_num]++;
	    dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
	  }
	else
	  {
	    moved_orig_loop_num[dloop->orig_loop_num] = -1;
	    dloop->orig_loop_num = 0;
	  }
      }

  pop_cfun ();

  if (moved_orig_loop_num)
    {
      FOR_EACH_VEC_ELT (bbs, i, bb)
	{
	  gimple *g = find_loop_dist_alias (bb);
	  if (g == NULL)
	    continue;

	  int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
	  gcc_assert (orig_loop_num
		      && (unsigned) orig_loop_num < vec_safe_length (larray));
	  if (moved_orig_loop_num[orig_loop_num] == 2)
	    {
	      /* If we have moved both loops with this orig_loop_num into
		 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
		 too, update the first argument.  */
	      gcc_assert ((*larray)[dloop->orig_loop_num] != NULL
			  && (get_loop (saved_cfun, dloop->orig_loop_num)
			      == NULL));
	      tree t = build_int_cst (integer_type_node,
				      (*larray)[dloop->orig_loop_num]->num);
	      gimple_call_set_arg (g, 0, t);

	      /* Make sure the following loop will not update it.  */
	      moved_orig_loop_num[orig_loop_num] = 0;
	    }
	  else
	    /* Otherwise at least one of the loops stayed in saved_cfun.
	       Remove the LOOP_DIST_ALIAS call.  */
	    fold_loop_internal_call (g, gimple_call_arg (g, 1));
	}
      FOR_EACH_BB_FN (bb, saved_cfun)
	{
	  gimple *g = find_loop_dist_alias (bb);
	  if (g == NULL)
	    continue;
	  int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
	  gcc_assert (orig_loop_num
		      && (unsigned) orig_loop_num < vec_safe_length (larray));
	  if (moved_orig_loop_num[orig_loop_num])
	    /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
	       of the corresponding loops was moved, remove it.  */
	    fold_loop_internal_call (g, gimple_call_arg (g, 1));
	}
      XDELETEVEC (moved_orig_loop_num);
    }
  ggc_free (larray);

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (bbs.length () >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  hash_map<tree, tree> vars_map;

  memset (&d, 0, sizeof (d));
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.vars_map = &vars_map;
  d.new_label_map = new_label_map;
  d.eh_map = eh_map;
  d.remap_decls_p = true;

  if (gimple_in_ssa_p (cfun))
    for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
      {
	tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
	set_ssa_default_def (dest_cfun, arg, narg);
	vars_map.put (arg, narg);
      }

  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
      after = bb;
    }

  loop->aux = NULL;
  loop0->aux = NULL;
  /* Loop sizes are no longer correct, fix them up.  */
  loop->num_nodes -= num_nodes;
  for (struct loop *outer = loop_outer (loop);
       outer; outer = loop_outer (outer))
    outer->num_nodes -= num_nodes;
  loop0->num_nodes -= bbs.length () - num_nodes;

  if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
    {
      struct loop *aloop;
      for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
	if (aloop != NULL)
	  {
	    if (aloop->simduid)
	      {
		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
					   d.to_context);
		dest_cfun->has_simduid_loops = true;
	      }
	    if (aloop->force_vectorize)
	      dest_cfun->has_force_vectorize_loops = true;
	  }
    }

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    {
      tree block;
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		  == NULL_TREE);
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
	= BLOCK_SUBBLOCKS (orig_block);
      for (block = BLOCK_SUBBLOCKS (orig_block);
	   block; block = BLOCK_CHAIN (block))
	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
    }

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    &vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  if (eh_map)
    delete eh_map;

  if (gimple_in_ssa_p (cfun))
    {
      /* We need to release ssa-names in a defined order, so first find them,
	 and then iterate in ascending version order.  */
      bitmap release_names = BITMAP_ALLOC (NULL);
      vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
      bitmap_iterator bi;
      EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
	release_ssa_name (ssa_name (i));
      BITMAP_FREE (release_names);
    }

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
  make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    {
      make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
      EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
    }
  else
    EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  FOR_EACH_VEC_ELT (dom_bbs, i, abb)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  dom_bbs.release ();

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  bbs.release ();

  return bb;
}
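/* Usage sketch (illustrative, not from the original sources): the OpenMP
   expander outlines the body of a parallel region roughly like

     child_fn = gimple_omp_taskreg_child_fn (stmt);
     child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb,
				      gimple_block (stmt));

   after which NEW_BB stands in for the whole region in the parent CFG.
   See expand_omp_taskreg in omp-expand.c for the real caller; the
   variable names above are hypothetical.  */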
/* Dump default def DEF to file FILE using FLAGS and indentation
   SPC.  */

static void
dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
{
  for (int i = 0; i < spc; ++i)
    fprintf (file, " ");
  dump_ssaname_info_to_file (file, def, spc);

  print_generic_expr (file, TREE_TYPE (def), flags);
  fprintf (file, " ");
  print_generic_expr (file, def, flags);
  fprintf (file, " = ");
  print_generic_expr (file, SSA_NAME_VAR (def), flags);
  fprintf (file, ";\n");
}
/* Print no_sanitize attribute to FILE for a given attribute VALUE.  */

static void
print_no_sanitize_attr_value (FILE *file, tree value)
{
  unsigned int flags = tree_to_uhwi (value);
  bool first = true;
  for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
    {
      if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
	{
	  if (!first)
	    fprintf (file, " | ");
	  fprintf (file, "%s", sanitizer_opts[i].name);
	  first = false;
	}
    }
}
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
   */

void
dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
    {
      fprintf (file, "__attribute__((");

      bool first = true;
      tree chain;
      for (chain = DECL_ATTRIBUTES (fndecl); chain;
	   first = false, chain = TREE_CHAIN (chain))
	{
	  if (!first)
	    fprintf (file, ", ");

	  tree name = get_attribute_name (chain);
	  print_generic_expr (file, name, dump_flags);
	  if (TREE_VALUE (chain) != NULL_TREE)
	    {
	      fprintf (file, " (");

	      if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
		print_no_sanitize_attr_value (file, TREE_VALUE (chain));
	      else
		print_generic_expr (file, TREE_VALUE (chain), dump_flags);
	      fprintf (file, ")");
	    }
	}

      fprintf (file, "))\n");
    }

  current_function_decl = fndecl;
  if (flags & TDF_GIMPLE)
    {
      print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
			  dump_flags | TDF_SLIM);
      fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
    }
  else
    fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (gimple_in_ssa_p (fun)
	  && (flags & TDF_ALIAS))
	{
	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
	       arg = DECL_CHAIN (arg))
	    {
	      tree def = ssa_default_def (fun, arg);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree res = DECL_RESULT (fun->decl);
	  if (res != NULL_TREE
	      && DECL_BY_REFERENCE (res))
	    {
	      tree def = ssa_default_def (fun, res);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree static_chain = fun->static_chain_decl;
	  if (static_chain != NULL_TREE)
	    {
	      tree def = ssa_default_def (fun, static_chain);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }
	}

      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    fprintf (file, "\n");

	    any_var = true;
	  }

      tree name;

      if (gimple_in_ssa_p (cfun))
	FOR_EACH_SSA_NAME (ix, name, cfun)
	  {
	    if (!SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags);

      fprintf (file, "}\n");
    }
  else if (fun->curr_properties & PROP_gimple_any)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    {
	      fprintf (file, "{\n");
	      /* No topmost bind, pretend it's ignored for later.  */
	      ignore_topmost_bind = true;
	    }
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h)  */

DEBUG_FUNCTION void
debug_function (tree fn, dump_flags_t flags)
{
  dump_function_to_file (fn, stderr, flags);
}
/* Print on FILE the indexes for the predecessors of basic_block BB.  */

static void
print_pred_bbs (FILE *file, basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    fprintf (file, "bb_%d ", e->src->index);
}
/* Print on FILE the indexes for the successors of basic_block BB.  */

static void
print_succ_bbs (FILE *file, basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    fprintf (file, "bb_%d ", e->dest->index);
}
/* Print to FILE the basic block BB following the VERBOSITY level.  */

static void
print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
{
  char *s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print basic_block's header.  */
  if (verbosity >= 2)
    {
      fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
      print_pred_bbs (file, bb);
      fprintf (file, "}, succs = {");
      print_succ_bbs (file, bb);
      fprintf (file, "})\n");
    }

  /* Print basic_block's body.  */
  if (verbosity >= 3)
    {
      fprintf (file, "%s  {\n", s_indent);
      dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
      fprintf (file, "%s  }\n", s_indent);
    }
}
static void print_loop_and_siblings (FILE *, struct loop *, int, int);
/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  */

static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations);

  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      print_decu (loop->nb_iterations_upper_bound, file);
    }
  if (loop->any_likely_upper_bound)
    {
      fprintf (file, ", likely_upper_bound = ");
      print_decu (loop->nb_iterations_likely_upper_bound, file);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      print_decu (loop->nb_iterations_estimate, file);
    }
  if (loop->unroll)
    fprintf (file, ", unroll = %d", loop->unroll);
  fprintf (file, ")\n");

  /* Print loop's body.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      FOR_EACH_BB_FN (bb, cfun)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
/* Print the LOOP and its sibling loops on FILE, indented INDENT
   spaces.  Following VERBOSITY level this outputs the contents of the
   loop, or just its structure.  */

static void
print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
			 int verbosity)
{
  if (loop == NULL)
    return;

  print_loop (file, loop, indent, verbosity);
  print_loop_and_siblings (file, loop->next, indent, verbosity);
}
/* Follow a CFG edge from the entry point of the program, and on entry
   of a loop, pretty print the loop structure on FILE.  */

void
print_loops (FILE *file, int verbosity)
{
  basic_block bb;

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  fprintf (file, "\nLoops in function: %s\n", current_function_name ());
  if (bb && bb->loop_father)
    print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
}
/* Dump a loop.  */

DEBUG_FUNCTION void
debug (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
DEBUG_FUNCTION void
debug (struct loop *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Dump a loop verbosely.  */

DEBUG_FUNCTION void
debug_verbose (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
DEBUG_FUNCTION void
debug_verbose (struct loop *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Debugging loops structure at tree level, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (cfun, num), verbosity);
}
/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false,
   otherwise.  */

static bool
gimple_block_ends_with_call_p (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
}
/* Return true if BB ends with a conditional branch.  Return false,
   otherwise.  */

static bool
gimple_block_ends_with_condjump_p (const_basic_block bb)
{
  gimple *stmt = last_stmt (CONST_CAST_BB (bb));
  return (stmt && gimple_code (stmt) == GIMPLE_COND);
}
/* Return true if statement T may terminate execution of BB in ways not
   explicitly represented in the CFG.  */

bool
stmt_can_terminate_bb_p (gimple *t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* Eh exception not handled internally terminates execution of the whole
     function.  */
  if (stmt_can_throw_external (t))
    return true;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      if (call_flags & (ECF_PURE | ECF_CONST)
	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
	return false;

      /* Function call may do longjmp, terminate program or do other things.
	 Special case noreturn that have non-abnormal edges out as in this case
	 the fact is sufficiently represented by lack of edges out of T.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple *t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && stmt_can_terminate_bb_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not.  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple *stmt, *last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (stmt_can_terminate_bb_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
		  if (flag_checking && stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		  e->probability = profile_probability::guessed_never ();
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    checking_verify_flow_info ();

  return blocks_split;
}
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* If we are removing a path inside a non-root loop, that may change
     loop ownership of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  auto_bitmap df, df_idom;
  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z be an arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
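/* Worked example (illustrative, not from the original sources): in the
   diamond A->B, A->C, B->D, C->D, removing edge A->B leaves B with no
   remaining predecessor, so B and everything dominated only by B is
   deleted.  D survives via C; its old immediate dominator was A, so A
   lands in DF_IDOM and the loop above re-queues A's dominator-tree
   children (here D) for iterate_fix_dominators, which recomputes
   idom(D) = C.  */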
/* Purge dead EH edges from basic block BB.  */

bool
gimple_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple *stmt = last_stmt (bb);

  if (stmt && stmt_can_throw_internal (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
	{
	  remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
/* Purge dead EH edges from basic block listed in BLOCKS.  */

bool
gimple_purge_all_dead_eh_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

      /* Earlier gimple_purge_dead_eh_edges could have removed
	 this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
	changed |= gimple_purge_dead_eh_edges (bb);
    }

  return changed;
}
/* Purge dead abnormal call edges from basic block BB.  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple *stmt = last_stmt (bb);

  if (!cfun->has_nonlocal_label
      && !cfun->calls_setjmp)
    return false;

  if (stmt && stmt_can_make_abnormal_goto (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    e->flags &= ~EDGE_ABNORMAL;
	  else
	    remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
/* Purge dead abnormal call edges from basic block listed in BLOCKS.  */

bool
gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

      /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
	 this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
	changed |= gimple_purge_dead_abnormal_call_edges (bb);
    }

  return changed;
}
/* This function is called whenever a new edge is created or
   redirected.  */

static void
gimple_execute_on_growing_pred (edge e)
{
  basic_block bb = e->dest;

  if (!gimple_seq_empty_p (phi_nodes (bb)))
    reserve_phi_args_for_new_edge (bb);
}
/* This function is called immediately before edge E is removed from
   the edge vector E->dest->preds.  */

static void
gimple_execute_on_shrinking_pred (edge e)
{
  if (!gimple_seq_empty_p (phi_nodes (e->dest)))
    remove_phi_args (e);
}
/*---------------------------------------------------------------------------
  Helper functions for Loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that the new_head-to-second edge received as part of
   edge splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2),  gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
/* Adds an if-else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
   the destination of the ELSE part.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple *new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build new conditional expr */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add new cond in cond_bb.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect new head with first head
     as well as second head.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
/* Do book-keeping of basic block BB for the profile consistency checker.
   If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
   then do post-pass accounting.  Store the counting in RECORD.  */

static void
gimple_account_profile_record (basic_block bb, int after_pass,
			       struct profile_record *record)
{
  gimple_stmt_iterator i;
  for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
    {
      record->size[after_pass]
	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
      if (bb->count.initialized_p ())
	record->time[after_pass]
	  += estimate_num_insns (gsi_stmt (i),
				 &eni_time_weights) * bb->count.to_gcov_type ();
      else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
	record->time[after_pass]
	  += estimate_num_insns (gsi_stmt (i),
				 &eni_time_weights) * bb->count.to_frequency (cfun);
    }
}
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb,		/* dump_bb  */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forward_block  */
  NULL,				/* tidy_fallthru_edge  */
  NULL,				/* force_nonfallthru  */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p  */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p  */
  gimple_flow_call_edges_add,	/* flow_call_edges_add  */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred  */
  gimple_execute_on_shrinking_pred,	/* execute_on_shrinking_pred  */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees  */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb  */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges  */
  flush_pending_stmts,		/* flush_pending_stmts  */
  gimple_empty_block_p,		/* block_empty_p  */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump  */
  gimple_account_profile_record,
};
/* Split all critical edges.  */

unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
namespace {

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
/* Insert COND expression which is GIMPLE_COND after STMT
   in basic block BB with appropriate basic block split
   and creation of a new conditionally executed basic block.
   Update profile so the new bb is visited with probability PROB.
   Return created basic block.  */

basic_block
insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
		profile_probability prob)
{
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert cond statement.  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create conditionally executed block.  */
  new_bb = create_empty_bb (bb);
  edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  e->probability = prob;
  new_bb->count = e->count ();
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix edge for split bb.  */
  fall->flags = EDGE_FALSE_VALUE;
  fall->probability -= e->probability;

  /* Update dominance info.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
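/* Usage sketch (illustrative, not from the original sources): to guard a
   rarely executed block after statement STMT, a caller might do

     gcond *cond = gimple_build_cond (NE_EXPR, val, null_pointer_node,
				      NULL_TREE, NULL_TREE);
     basic_block then_bb
       = insert_cond_bb (bb, stmt, cond,
			 profile_probability::very_unlikely ());

   and then populate THEN_BB; the FALSE (fallthrough) path continues with
   the code that used to follow STMT.  The names BB, STMT and VAL are
   hypothetical.  */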
/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
		 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}
/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
		 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}
/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
		 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}
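/* Usage sketch (illustrative, not from the original sources): lowering
   code chains these helpers to emit simple expressions before GSI, e.g.
   computing a*b + c as two statements whose results are gimple values:

     tree t = gimplify_build2 (gsi, MULT_EXPR, type, a, b);
     tree r = gimplify_build2 (gsi, PLUS_EXPR, type, t, c);

   Each call folds the expression and forces it into gimple_val form
   before the statement at *GSI.  */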
/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
				     edge *true_edge,
				     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
/* From a controlling predicate in the immediate dominator DOM of
   PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
   predicate evaluates to true and false and store them to
   *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
   they are non-NULL.  Returns true if the edges can be determined,
   else return false.  */

bool
extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
				     edge *true_controlled_edge,
				     edge *false_controlled_edge)
{
  basic_block bb = phiblock;
  edge true_edge, false_edge, tem;
  edge e0 = NULL, e1 = NULL;

  /* We have to verify that one edge into the PHI node is dominated
     by the true edge of the predicate block and the other edge
     dominated by the false edge.  This ensures that the PHI argument
     we are going to take is completely determined by the path we
     take from the predicate block.
     We can only use BB dominance checks below if the destination of
     the true/false edges are dominated by their edge, thus only
     have a single predecessor.  */
  extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
  tem = EDGE_PRED (bb, 0);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;
  tem = EDGE_PRED (bb, 1);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;
  if (!e0 || !e1)
    return false;

  if (true_controlled_edge)
    *true_controlled_edge = e0;
  if (false_controlled_edge)
    *false_controlled_edge = e1;

  return true;
}
/* Generate a range test LHS CODE RHS that determines whether INDEX is in the
   range [low, high].  Place associated stmts before the last stmt of BB.  */

void
generate_range_test (basic_block bb, tree index, tree low, tree high,
                     tree *lhs, tree *rhs)
{
  tree type = TREE_TYPE (index);
  tree utype = unsigned_type_for (type);

  low = fold_convert (type, low);
  high = fold_convert (type, high);

  tree tmp = make_ssa_name (type);
  gassign *sub1
    = gimple_build_assign (tmp, MINUS_EXPR, index, low);

  *lhs = make_ssa_name (utype);
  gassign *a = gimple_build_assign (*lhs, NOP_EXPR, tmp);

  *rhs = fold_build2 (MINUS_EXPR, utype, high, low);
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gsi_insert_before (&gsi, sub1, GSI_SAME_STMT);
  gsi_insert_before (&gsi, a, GSI_SAME_STMT);
}
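
/* The test relies on unsigned wraparound: INDEX is in [LOW, HIGH] iff
   (utype) (INDEX - LOW) <= (utype) (HIGH - LOW), because an INDEX
   below LOW wraps around to a large unsigned value.  A caller can
   build the comparison from the outputs, e.g. (illustrative sketch):

     tree lhs, rhs;
     generate_range_test (bb, index, low, high, &lhs, &rhs);
     gcond *cmp = gimple_build_cond (LE_EXPR, lhs, rhs,
                                     NULL_TREE, NULL_TREE);  */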
/* Emit return warnings.  */

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return
unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple *last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
           (e = ei_safe_edge (ei)); )
        {
          last = last_stmt (e->src);
          if ((gimple_code (last) == GIMPLE_RETURN
               || gimple_call_builtin_p (last, BUILT_IN_RETURN))
              && location == UNKNOWN_LOCATION
              && ((location = LOCATION_LOCUS (gimple_location (last)))
                  != UNKNOWN_LOCATION)
              && !optimize)
            break;
          /* When optimizing, replace return stmts in noreturn functions
             with __builtin_unreachable () call.  */
          if (optimize && gimple_code (last) == GIMPLE_RETURN)
            {
              tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              gimple *new_stmt = gimple_build_call (fndecl, 0);
              gimple_set_location (new_stmt, gimple_location (last));
              gimple_stmt_iterator gsi = gsi_for_stmt (last);
              gsi_replace (&gsi, new_stmt, true);
              remove_edge (e);
            }
          else
            ei_next (&ei);
        }
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type > 0
           && !TREE_NO_WARNING (fun->decl)
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
        {
          gimple *last = last_stmt (e->src);
          greturn *return_stmt = dyn_cast <greturn *> (last);
          if (return_stmt
              && gimple_return_retval (return_stmt) == NULL
              && !gimple_no_warning_p (last))
            {
              location = gimple_location (last);
              if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
                location = fun->function_end_locus;
              warning_at (location, OPT_Wreturn_type,
                          "control reaches end of non-void function");
              TREE_NO_WARNING (fun->decl) = 1;
              break;
            }
        }
      /* The C++ FE turns fallthrough from the end of non-void function
         into __builtin_unreachable () call with BUILTINS_LOCATION.
         Recognize those too.  */
      basic_block bb;
      if (!TREE_NO_WARNING (fun->decl))
        FOR_EACH_BB_FN (bb, fun)
          if (EDGE_COUNT (bb->succs) == 0)
            {
              gimple *last = last_stmt (bb);
              const enum built_in_function ubsan_missing_ret
                = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
              if (last
                  && ((LOCATION_LOCUS (gimple_location (last))
                       == BUILTINS_LOCATION
                       && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
                      || gimple_call_builtin_p (last, ubsan_missing_ret)))
                {
                  gimple_stmt_iterator gsi = gsi_for_stmt (last);
                  gsi_prev_nondebug (&gsi);
                  gimple *prev = gsi_stmt (gsi);
                  if (!prev)
                    location = UNKNOWN_LOCATION;
                  else
                    location = gimple_location (prev);
                  if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
                    location = fun->function_end_locus;
                  warning_at (location, OPT_Wreturn_type,
                              "control reaches end of non-void function");
                  TREE_NO_WARNING (fun->decl) = 1;
                  break;
                }
            }
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
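
/* For instance (illustrative), compiling

     int f (int x) { if (x) return 1; }

   leaves a fallthru path to EXIT that ends in a value-less
   GIMPLE_RETURN, so with -Wreturn-type enabled the pass reports
   "control reaches end of non-void function" at the end of f.  */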
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple *g = gsi_stmt (i);

      switch (gimple_code (g))
        {
        case GIMPLE_BIND:
          do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
          break;
        case GIMPLE_TRY:
          do_warn_unused_result (gimple_try_eval (g));
          do_warn_unused_result (gimple_try_cleanup (g));
          break;
        case GIMPLE_CATCH:
          do_warn_unused_result (gimple_catch_handler (
                                   as_a <gcatch *> (g)));
          break;
        case GIMPLE_EH_FILTER:
          do_warn_unused_result (gimple_eh_filter_failure (g));
          break;

        case GIMPLE_CALL:
          if (gimple_call_lhs (g))
            break;
          if (gimple_call_internal_p (g))
            break;

          /* This is a naked call, as opposed to a GIMPLE_CALL with an
             LHS.  All calls whose value is ignored should be
             represented like this.  Look for the attribute.  */
          fdecl = gimple_call_fndecl (g);
          ftype = gimple_call_fntype (g);

          if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
            {
              location_t loc = gimple_location (g);

              if (fdecl)
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of %qD, "
                            "declared with attribute warn_unused_result",
                            fdecl);
              else
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of function "
                            "declared with attribute warn_unused_result");
            }
          break;

        default:
          /* Not a container, not a call, or a call whose value is used.  */
          break;
        }
    }
}
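
/* For instance (illustrative):

     __attribute__ ((warn_unused_result)) int get_fd (void);
     void g (void) { get_fd (); }

   the call to get_fd appears as a GIMPLE_CALL with no LHS, so the
   attribute lookup above fires and -Wunused-result warns.  */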
namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
    {
      do_warn_unused_result (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as stand alone pass from pass manager, because
   in between inlining and this fixup the verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  cgraph_node *node = cgraph_node::get (current_function_decl);
  profile_count num = node->count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  bool scale = num.initialized_p () && !(num == den);

  if (scale)
    {
      profile_count::adjust_for_ipa_scaling (&num, &den);
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
      EXIT_BLOCK_PTR_FOR_FN (cfun)->count
        = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (scale)
        bb->count = bb->count.apply_scale (num, den);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple *stmt = gsi_stmt (gsi);
          tree decl = is_gimple_call (stmt)
                      ? gimple_call_fndecl (stmt)
                      : NULL;
          if (decl)
            {
              int flags = gimple_call_flags (stmt);
              if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
                {
                  if (gimple_purge_dead_abnormal_call_edges (bb))
                    todo |= TODO_cleanup_cfg;

                  if (gimple_in_ssa_p (cfun))
                    {
                      todo |= TODO_update_ssa | TODO_cleanup_cfg;
                      update_stmt (stmt);
                    }
                }

              if (flags & ECF_NORETURN
                  && fixup_noreturn_call (stmt))
                todo |= TODO_cleanup_cfg;
            }

          /* Remove stores to variables we marked write-only.
             Keep access when store has side effect, i.e. in case when source
             is volatile.  */
          if (gimple_store_p (stmt)
              && !gimple_has_side_effects (stmt))
            {
              tree lhs = get_base_address (gimple_get_lhs (stmt));

              if (VAR_P (lhs)
                  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
                  && varpool_node::get (lhs)->writeonly)
                {
                  unlink_stmt_vdef (stmt);
                  gsi_remove (&gsi, true);
                  release_defs (stmt);
                  todo |= TODO_update_ssa | TODO_cleanup_cfg;
                  continue;
                }
            }
          /* For calls we can simply remove LHS when it is known
             to be write-only.  */
          if (is_gimple_call (stmt)
              && gimple_get_lhs (stmt))
            {
              tree lhs = get_base_address (gimple_get_lhs (stmt));

              if (VAR_P (lhs)
                  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
                  && varpool_node::get (lhs)->writeonly)
                {
                  gimple_call_set_lhs (stmt, NULL);
                  update_stmt (stmt);
                  todo |= TODO_update_ssa | TODO_cleanup_cfg;
                }
            }

          if (maybe_clean_eh_stmt (stmt)
              && gimple_purge_dead_eh_edges (bb))
            todo |= TODO_cleanup_cfg;
          gsi_next (&gsi);
        }

      /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call end it with
         a call to __builtin_unreachable.  This situation can occur
         when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple *stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || !gimple_call_noreturn_p (stmt))))
            {
              if (stmt && is_gimple_call (stmt))
                gimple_call_set_ctrl_altering (stmt, false);
              tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              stmt = gimple_build_call (fndecl, 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
              if (!cfun->after_inlining)
                {
                  gcall *call_stmt = dyn_cast <gcall *> (stmt);
                  node->create_edge (cgraph_node::get_create (fndecl),
                                     call_stmt, bb->count);
                }
            }
        }
    }
  if (scale)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
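
/* As an example of the no-successor case above (illustrative): if a
   function declared noreturn nevertheless returns,

     __attribute__ ((noreturn)) void dead (void) { }

   then after inlining it the inlined body can fall off the end of a
   block with no successors, and the code above terminates that block
   with a __builtin_unreachable () call.  */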
namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
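
/* Unlike the warning passes above, pass_fixup_cfg provides clone (),
   because it is instantiated more than once in the pass pipeline; see
   the multiple NEXT_PASS (pass_fixup_cfg) entries in passes.def.  */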
/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}
#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
                            NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}
/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
     - build_gimple_cfg
     - make_blocks: calls create_basic_block (seq, bb);
     - link_block, unlink_block.  */
/* Verify a simple cfg of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.
     Each BB in our simple chain should be dominated by the one before
     it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}
/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     B   C
      \ /
       D
       |
      EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where the N subgraph nodes all have edges
   pointing to each other node, also to themselves).  */

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 5;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_POST_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_POST_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - switch statement (a block with many out-edges)
   - something that jumps to itself.  */

#endif /* CHECKING_P */