/* Control flow functions for trees.
   Copyright (C) 2001-2024 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "tree-ssa-dce.h"
58 #include "omp-general.h"
59 #include "omp-expand.h"
60 #include "tree-cfgcleanup.h"
69 /* This file contains functions for building the Control Flow Graph (CFG)
70 for a function tree. */
72 /* Local declarations. */
/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;
77 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
78 which use a particular edge. The CASE_LABEL_EXPRs are chained together
79 via their CASE_CHAIN field, which we clear after we're done with the
80 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
82 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
83 update the case vector in response to edge redirections.
85 Right now this table is set up and torn down at key points in the
86 compilation process. It would be nice if we could make the table
87 more persistent. The key is getting notification of changes to
88 the CFG (particularly edge removal, creation and redirection). */
90 static hash_map
<edge
, tree
> *edge_to_cases
;
92 /* If we record edge_to_cases, this bitmap will hold indexes
93 of basic blocks that end in a GIMPLE_SWITCH which we touched
94 due to edge manipulations. */
96 static bitmap touched_switch_bbs
;
98 /* OpenMP region idxs for blocks during cfg pass. */
99 static vec
<int> bb_to_omp_idx
;
/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged into their predecessor's label by
     cleanup_dead_labels / gimple_merge_blocks.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
109 /* Data to pass to replace_block_vars_by_duplicates_1. */
110 struct replace_decls_d
112 hash_map
<tree
, tree
> *vars_map
;
/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  /* Source line this entry describes (see locus_discrim_hasher::hash).  */
  int location_line;
  /* Last discriminator value handed out for that line.  */
  int discriminator;
};
123 /* Hashtable helpers. */
125 struct locus_discrim_hasher
: free_ptr_hash
<locus_discrim_map
>
127 static inline hashval_t
hash (const locus_discrim_map
*);
128 static inline bool equal (const locus_discrim_map
*,
129 const locus_discrim_map
*);
132 /* Trivial hash function for a location_t. ITEM is a pointer to
133 a hash table entry that maps a location_t to a discriminator. */
136 locus_discrim_hasher::hash (const locus_discrim_map
*item
)
138 return item
->location_line
;
141 /* Equality function for the locus-to-discriminator map. A and B
142 point to the two hash table entries to compare. */
145 locus_discrim_hasher::equal (const locus_discrim_map
*a
,
146 const locus_discrim_map
*b
)
148 return a
->location_line
== b
->location_line
;
151 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
153 /* Basic blocks and flowgraphs. */
154 static void make_blocks (gimple_seq
);
157 static void make_edges (void);
158 static void assign_discriminators (void);
159 static void make_cond_expr_edges (basic_block
);
160 static void make_gimple_switch_edges (gswitch
*, basic_block
);
161 static bool make_goto_expr_edges (basic_block
);
162 static void make_gimple_asm_edges (basic_block
);
163 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
164 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
166 /* Various helpers. */
167 static inline bool stmt_starts_bb_p (gimple
*, gimple
*);
168 static bool gimple_verify_flow_info (void);
169 static void gimple_make_forwarder_block (edge
);
170 static gimple
*first_non_label_stmt (basic_block
);
171 static bool verify_gimple_transaction (gtransaction
*);
172 static bool call_can_make_abnormal_goto (gimple
*);
174 /* Flowgraph optimization and cleanup. */
175 static void gimple_merge_blocks (basic_block
, basic_block
);
176 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
177 static void remove_bb (basic_block
);
178 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
179 static edge
find_taken_edge_cond_expr (const gcond
*, tree
);
182 init_empty_tree_cfg_for_function (struct function
*fn
)
184 /* Initialize the basic block array. */
186 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
187 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
188 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
189 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
190 initial_cfg_capacity
, true);
192 /* Build a mapping of labels to their associated blocks. */
193 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
194 initial_cfg_capacity
, true);
196 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
197 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
199 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
200 = EXIT_BLOCK_PTR_FOR_FN (fn
);
201 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
202 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
206 init_empty_tree_cfg (void)
208 init_empty_tree_cfg_for_function (cfun
);
211 /*---------------------------------------------------------------------------
213 ---------------------------------------------------------------------------*/
215 /* Entry point to the CFG builder for trees. SEQ is the sequence of
216 statements to be added to the flowgraph. */
219 build_gimple_cfg (gimple_seq seq
)
221 /* Register specific gimple functions. */
222 gimple_register_cfg_hooks ();
224 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
226 init_empty_tree_cfg ();
230 /* Make sure there is always at least one block, even if it's empty. */
231 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
232 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
234 /* Adjust the size of the array. */
235 if (basic_block_info_for_fn (cfun
)->length ()
236 < (size_t) n_basic_blocks_for_fn (cfun
))
237 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
238 n_basic_blocks_for_fn (cfun
));
240 /* To speed up statement iterator walks, we first purge dead labels. */
241 cleanup_dead_labels ();
243 /* Group case nodes to reduce the number of edges.
244 We do this after cleaning up dead labels because otherwise we miss
245 a lot of obvious case merging opportunities. */
246 group_case_labels ();
248 /* Create the edges of the flowgraph. */
249 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
251 assign_discriminators ();
252 cleanup_dead_labels ();
253 delete discriminator_per_locus
;
254 discriminator_per_locus
= NULL
;
257 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
258 them and propagate the information to LOOP. We assume that the annotations
259 come immediately before the condition in BB, if any. */
262 replace_loop_annotate_in_block (basic_block bb
, class loop
*loop
)
264 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
265 gimple
*stmt
= gsi_stmt (gsi
);
267 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
270 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
272 stmt
= gsi_stmt (gsi
);
273 if (gimple_code (stmt
) != GIMPLE_CALL
)
275 if (!gimple_call_internal_p (stmt
)
276 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
279 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
281 case annot_expr_ivdep_kind
:
282 loop
->safelen
= INT_MAX
;
284 case annot_expr_unroll_kind
:
286 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt
, 2));
287 cfun
->has_unroll
= true;
289 case annot_expr_no_vector_kind
:
290 loop
->dont_vectorize
= true;
292 case annot_expr_vector_kind
:
293 loop
->force_vectorize
= true;
294 cfun
->has_force_vectorize_loops
= true;
296 case annot_expr_parallel_kind
:
297 loop
->can_be_parallel
= true;
298 loop
->safelen
= INT_MAX
;
300 case annot_expr_maybe_infinite_kind
:
301 loop
->finite_p
= false;
307 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
308 gimple_call_arg (stmt
, 0));
309 gsi_replace (&gsi
, stmt
, true);
313 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
314 them and propagate the information to the loop. We assume that the
315 annotations come immediately before the condition of the loop. */
318 replace_loop_annotate (void)
321 gimple_stmt_iterator gsi
;
324 for (auto loop
: loops_list (cfun
, 0))
326 /* Push the global flag_finite_loops state down to individual loops. */
327 loop
->finite_p
= flag_finite_loops
;
329 /* Check all exit source blocks for annotations. */
330 for (auto e
: get_loop_exit_edges (loop
))
331 replace_loop_annotate_in_block (e
->src
, loop
);
334 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
335 FOR_EACH_BB_FN (bb
, cfun
)
337 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
339 stmt
= gsi_stmt (gsi
);
340 if (gimple_code (stmt
) != GIMPLE_CALL
)
342 if (!gimple_call_internal_p (stmt
)
343 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
346 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
348 case annot_expr_ivdep_kind
:
349 case annot_expr_unroll_kind
:
350 case annot_expr_no_vector_kind
:
351 case annot_expr_vector_kind
:
352 case annot_expr_parallel_kind
:
353 case annot_expr_maybe_infinite_kind
:
359 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
360 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
361 gimple_call_arg (stmt
, 0));
362 gsi_replace (&gsi
, stmt
, true);
368 execute_build_cfg (void)
370 gimple_seq body
= gimple_body (current_function_decl
);
372 build_gimple_cfg (body
);
373 gimple_set_body (current_function_decl
, NULL
);
374 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
376 fprintf (dump_file
, "Scope blocks:\n");
377 dump_scope_blocks (dump_file
, dump_flags
);
381 bb_to_omp_idx
.release ();
383 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
384 replace_loop_annotate ();
390 const pass_data pass_data_build_cfg
=
392 GIMPLE_PASS
, /* type */
394 OPTGROUP_NONE
, /* optinfo_flags */
395 TV_TREE_CFG
, /* tv_id */
396 PROP_gimple_leh
, /* properties_required */
397 ( PROP_cfg
| PROP_loops
), /* properties_provided */
398 0, /* properties_destroyed */
399 0, /* todo_flags_start */
400 0, /* todo_flags_finish */
403 class pass_build_cfg
: public gimple_opt_pass
406 pass_build_cfg (gcc::context
*ctxt
)
407 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
410 /* opt_pass methods: */
411 unsigned int execute (function
*) final override
413 return execute_build_cfg ();
416 }; // class pass_build_cfg
421 make_pass_build_cfg (gcc::context
*ctxt
)
423 return new pass_build_cfg (ctxt
);
427 /* Return true if T is a computed goto. */
430 computed_goto_p (gimple
*t
)
432 return (gimple_code (t
) == GIMPLE_GOTO
433 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
436 /* Returns true if the sequence of statements STMTS only contains
437 a call to __builtin_unreachable (). */
440 gimple_seq_unreachable_p (gimple_seq stmts
)
443 /* Return false if -fsanitize=unreachable, we don't want to
444 optimize away those calls, but rather turn them into
445 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
447 || sanitize_flags_p (SANITIZE_UNREACHABLE
))
450 gimple_stmt_iterator gsi
= gsi_last (stmts
);
452 if (!gimple_call_builtin_p (gsi_stmt (gsi
), BUILT_IN_UNREACHABLE
))
455 for (gsi_prev (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
457 gimple
*stmt
= gsi_stmt (gsi
);
458 if (gimple_code (stmt
) != GIMPLE_LABEL
459 && !is_gimple_debug (stmt
)
460 && !gimple_clobber_p (stmt
))
466 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
467 the other edge points to a bb with just __builtin_unreachable ().
468 I.e. return true for C->M edge in:
476 __builtin_unreachable ();
480 assert_unreachable_fallthru_edge_p (edge e
)
482 basic_block pred_bb
= e
->src
;
483 if (safe_is_a
<gcond
*> (*gsi_last_bb (pred_bb
)))
485 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
486 if (other_bb
== e
->dest
)
487 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
488 if (EDGE_COUNT (other_bb
->succs
) == 0)
489 return gimple_seq_unreachable_p (bb_seq (other_bb
));
495 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
496 could alter control flow except via eh. We initialize the flag at
497 CFG build time and only ever clear it later. */
500 gimple_call_initialize_ctrl_altering (gimple
*stmt
)
502 int flags
= gimple_call_flags (stmt
);
504 /* A call alters control flow if it can make an abnormal goto. */
505 if (call_can_make_abnormal_goto (stmt
)
506 /* A call also alters control flow if it does not return. */
507 || flags
& ECF_NORETURN
508 /* TM ending statements have backedges out of the transaction.
509 Return true so we split the basic block containing them.
510 Note that the TM_BUILTIN test is merely an optimization. */
511 || ((flags
& ECF_TM_BUILTIN
)
512 && is_tm_ending_fndecl (gimple_call_fndecl (stmt
)))
513 /* BUILT_IN_RETURN call is same as return statement. */
514 || gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
)
515 /* IFN_UNIQUE should be the last insn, to make checking for it
516 as cheap as possible. */
517 || (gimple_call_internal_p (stmt
)
518 && gimple_call_internal_unique_p (stmt
)))
519 gimple_call_set_ctrl_altering (stmt
, true);
521 gimple_call_set_ctrl_altering (stmt
, false);
525 /* Insert SEQ after BB and build a flowgraph. */
528 make_blocks_1 (gimple_seq seq
, basic_block bb
)
530 gimple_stmt_iterator i
= gsi_start (seq
);
532 gimple
*prev_stmt
= NULL
;
533 bool start_new_block
= true;
534 bool first_stmt_of_seq
= true;
536 while (!gsi_end_p (i
))
538 /* PREV_STMT should only be set to a debug stmt if the debug
539 stmt is before nondebug stmts. Once stmt reaches a nondebug
540 nonlabel, prev_stmt will be set to it, so that
541 stmt_starts_bb_p will know to start a new block if a label is
542 found. However, if stmt was a label after debug stmts only,
543 keep the label in prev_stmt even if we find further debug
544 stmts, for there may be other labels after them, and they
545 should land in the same block. */
546 if (!prev_stmt
|| !stmt
|| !is_gimple_debug (stmt
))
550 if (stmt
&& is_gimple_call (stmt
))
551 gimple_call_initialize_ctrl_altering (stmt
);
553 /* If the statement starts a new basic block or if we have determined
554 in a previous pass that we need to create a new block for STMT, do
556 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
558 if (!first_stmt_of_seq
)
559 gsi_split_seq_before (&i
, &seq
);
560 bb
= create_basic_block (seq
, bb
);
561 start_new_block
= false;
565 /* Now add STMT to BB and create the subgraphs for special statement
567 gimple_set_bb (stmt
, bb
);
569 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
571 if (stmt_ends_bb_p (stmt
))
573 /* If the stmt can make abnormal goto use a new temporary
574 for the assignment to the LHS. This makes sure the old value
575 of the LHS is available on the abnormal edge. Otherwise
576 we will end up with overlapping life-ranges for abnormal
578 if (gimple_has_lhs (stmt
)
579 && stmt_can_make_abnormal_goto (stmt
)
580 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
582 tree lhs
= gimple_get_lhs (stmt
);
583 tree tmp
= create_tmp_var (TREE_TYPE (lhs
));
584 gimple
*s
= gimple_build_assign (lhs
, tmp
);
585 gimple_set_location (s
, gimple_location (stmt
));
586 gimple_set_block (s
, gimple_block (stmt
));
587 gimple_set_lhs (stmt
, tmp
);
588 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
590 start_new_block
= true;
594 first_stmt_of_seq
= false;
599 /* Build a flowgraph for the sequence of stmts SEQ. */
602 make_blocks (gimple_seq seq
)
604 /* Look for debug markers right before labels, and move the debug
605 stmts after the labels. Accepting labels among debug markers
606 adds no value, just complexity; if we wanted to annotate labels
607 with view numbers (so sequencing among markers would matter) or
608 somesuch, we're probably better off still moving the labels, but
609 adding other debug annotations in their original positions or
610 emitting nonbind or bind markers associated with the labels in
611 the original position of the labels.
613 Moving labels would probably be simpler, but we can't do that:
614 moving labels assigns label ids to them, and doing so because of
615 debug markers makes for -fcompare-debug and possibly even codegen
616 differences. So, we have to move the debug stmts instead. To
617 that end, we scan SEQ backwards, marking the position of the
618 latest (earliest we find) label, and moving debug stmts that are
619 not separated from it by nondebug nonlabel stmts after the
621 if (MAY_HAVE_DEBUG_MARKER_STMTS
)
623 gimple_stmt_iterator label
= gsi_none ();
625 for (gimple_stmt_iterator i
= gsi_last (seq
); !gsi_end_p (i
); gsi_prev (&i
))
627 gimple
*stmt
= gsi_stmt (i
);
629 /* If this is the first label we encounter (latest in SEQ)
630 before nondebug stmts, record its position. */
631 if (is_a
<glabel
*> (stmt
))
633 if (gsi_end_p (label
))
638 /* Without a recorded label position to move debug stmts to,
639 there's nothing to do. */
640 if (gsi_end_p (label
))
643 /* Move the debug stmt at I after LABEL. */
644 if (is_gimple_debug (stmt
))
646 gcc_assert (gimple_debug_nonbind_marker_p (stmt
));
647 /* As STMT is removed, I advances to the stmt after
648 STMT, so the gsi_prev in the for "increment"
649 expression gets us to the stmt we're to visit after
650 STMT. LABEL, however, would advance to the moved
651 stmt if we passed it to gsi_move_after, so pass it a
652 copy instead, so as to keep LABEL pointing to the
654 gimple_stmt_iterator copy
= label
;
655 gsi_move_after (&i
, ©
);
659 /* There aren't any (more?) debug stmts before label, so
660 there isn't anything else to move after it. */
665 make_blocks_1 (seq
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
668 /* Create and return a new empty basic block after bb AFTER. */
671 create_bb (void *h
, void *e
, basic_block after
)
677 /* Create and initialize a new basic block. Since alloc_block uses
678 GC allocation that clears memory to allocate a basic block, we do
679 not have to clear the newly allocated basic block here. */
682 bb
->index
= last_basic_block_for_fn (cfun
);
684 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
686 /* Add the new block to the linked list of blocks. */
687 link_block (bb
, after
);
689 /* Grow the basic block array if needed. */
690 if ((size_t) last_basic_block_for_fn (cfun
)
691 == basic_block_info_for_fn (cfun
)->length ())
692 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
693 last_basic_block_for_fn (cfun
) + 1);
695 /* Add the newly created block to the array. */
696 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
698 n_basic_blocks_for_fn (cfun
)++;
699 last_basic_block_for_fn (cfun
)++;
705 /*---------------------------------------------------------------------------
707 ---------------------------------------------------------------------------*/
709 /* If basic block BB has an abnormal edge to a basic block
710 containing IFN_ABNORMAL_DISPATCHER internal call, return
711 that the dispatcher's basic block, otherwise return NULL. */
714 get_abnormal_succ_dispatcher (basic_block bb
)
719 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
720 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
722 gimple_stmt_iterator gsi
723 = gsi_start_nondebug_after_labels_bb (e
->dest
);
724 gimple
*g
= gsi_stmt (gsi
);
725 if (g
&& gimple_call_internal_p (g
, IFN_ABNORMAL_DISPATCHER
))
731 /* Helper function for make_edges. Create a basic block with
732 with ABNORMAL_DISPATCHER internal call in it if needed, and
733 create abnormal edges from BBS to it and from it to FOR_BB
734 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
737 handle_abnormal_edges (basic_block
*dispatcher_bbs
, basic_block for_bb
,
738 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
740 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
741 unsigned int idx
= 0;
745 if (!bb_to_omp_idx
.is_empty ())
747 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
748 if (bb_to_omp_idx
[for_bb
->index
] != 0)
752 /* If the dispatcher has been created already, then there are basic
753 blocks with abnormal edges to it, so just make a new edge to
755 if (*dispatcher
== NULL
)
757 /* Check if there are any basic blocks that need to have
758 abnormal edges to this dispatcher. If there are none, return
760 if (bb_to_omp_idx
.is_empty ())
762 if (bbs
->is_empty ())
767 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
768 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
774 /* Create the dispatcher bb. */
775 *dispatcher
= create_basic_block (NULL
, for_bb
);
778 /* Factor computed gotos into a common computed goto site. Also
779 record the location of that site so that we can un-factor the
780 gotos after we have converted back to normal form. */
781 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
783 /* Create the destination of the factored goto. Each original
784 computed goto will put its desired destination into this
785 variable and jump to the label we create immediately below. */
786 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
788 /* Build a label for the new block which will contain the
789 factored computed goto. */
790 tree factored_label_decl
791 = create_artificial_label (UNKNOWN_LOCATION
);
792 gimple
*factored_computed_goto_label
793 = gimple_build_label (factored_label_decl
);
794 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
796 /* Build our new computed goto. */
797 gimple
*factored_computed_goto
= gimple_build_goto (var
);
798 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
800 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
802 if (!bb_to_omp_idx
.is_empty ()
803 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
806 gsi
= gsi_last_bb (bb
);
807 gimple
*last
= gsi_stmt (gsi
);
809 gcc_assert (computed_goto_p (last
));
811 /* Copy the original computed goto's destination into VAR. */
813 = gimple_build_assign (var
, gimple_goto_dest (last
));
814 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
816 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
817 e
->goto_locus
= gimple_location (last
);
818 gsi_remove (&gsi
, true);
823 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
824 gcall
*g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
826 gimple_call_set_ctrl_altering (g
, true);
827 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
828 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
830 /* Create predecessor edges of the dispatcher. */
831 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
833 if (!bb_to_omp_idx
.is_empty ()
834 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
836 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
841 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
844 /* Creates outgoing edges for BB. Returns 1 when it ends with an
845 computed goto, returns 2 when it ends with a statement that
846 might return to this function via an nonlocal goto, otherwise
847 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
850 make_edges_bb (basic_block bb
, struct omp_region
**pcur_region
, int *pomp_index
)
852 gimple
*last
= *gsi_last_bb (bb
);
853 bool fallthru
= false;
859 switch (gimple_code (last
))
862 if (make_goto_expr_edges (bb
))
868 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
869 e
->goto_locus
= gimple_location (last
);
874 make_cond_expr_edges (bb
);
878 make_gimple_switch_edges (as_a
<gswitch
*> (last
), bb
);
885 case GIMPLE_EH_DISPATCH
:
886 fallthru
= make_eh_dispatch_edges (as_a
<geh_dispatch
*> (last
));
890 /* If this function receives a nonlocal goto, then we need to
891 make edges from this call site to all the nonlocal goto
893 if (stmt_can_make_abnormal_goto (last
))
896 /* If this statement has reachable exception handlers, then
897 create abnormal edges to them. */
900 /* BUILTIN_RETURN is really a return statement. */
901 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
903 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
906 /* Some calls are known not to return. */
908 fallthru
= !gimple_call_noreturn_p (last
);
912 /* A GIMPLE_ASSIGN may throw internally and thus be considered
914 if (is_ctrl_altering_stmt (last
))
920 make_gimple_asm_edges (bb
);
925 fallthru
= omp_make_gimple_edges (bb
, pcur_region
, pomp_index
);
928 case GIMPLE_TRANSACTION
:
930 gtransaction
*txn
= as_a
<gtransaction
*> (last
);
931 tree label1
= gimple_transaction_label_norm (txn
);
932 tree label2
= gimple_transaction_label_uninst (txn
);
935 make_edge (bb
, label_to_block (cfun
, label1
), EDGE_FALLTHRU
);
937 make_edge (bb
, label_to_block (cfun
, label2
),
938 EDGE_TM_UNINSTRUMENTED
| (label1
? 0 : EDGE_FALLTHRU
));
940 tree label3
= gimple_transaction_label_over (txn
);
941 if (gimple_transaction_subcode (txn
)
942 & (GTMA_HAVE_ABORT
| GTMA_IS_OUTER
))
943 make_edge (bb
, label_to_block (cfun
, label3
), EDGE_TM_ABORT
);
950 gcc_assert (!stmt_ends_bb_p (last
));
956 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
961 /* Join all the blocks in the flowgraph. */
967 struct omp_region
*cur_region
= NULL
;
968 auto_vec
<basic_block
> ab_edge_goto
;
969 auto_vec
<basic_block
> ab_edge_call
;
970 int cur_omp_region_idx
= 0;
972 /* Create an edge from entry to the first block with executable
974 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
975 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
978 /* Traverse the basic block array placing edges. */
979 FOR_EACH_BB_FN (bb
, cfun
)
983 if (!bb_to_omp_idx
.is_empty ())
984 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
986 mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
988 ab_edge_goto
.safe_push (bb
);
990 ab_edge_call
.safe_push (bb
);
992 if (cur_region
&& bb_to_omp_idx
.is_empty ())
993 bb_to_omp_idx
.safe_grow_cleared (n_basic_blocks_for_fn (cfun
), true);
996 /* Computed gotos are hell to deal with, especially if there are
997 lots of them with a large number of destinations. So we factor
998 them to a common computed goto location before we build the
999 edge list. After we convert back to normal form, we will un-factor
1000 the computed gotos since factoring introduces an unwanted jump.
1001 For non-local gotos and abnormal edges from calls to calls that return
1002 twice or forced labels, factor the abnormal edges too, by having all
1003 abnormal edges from the calls go to a common artificial basic block
1004 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
1005 basic block to all forced labels and calls returning twice.
1006 We do this per-OpenMP structured block, because those regions
1007 are guaranteed to be single entry single exit by the standard,
1008 so it is not allowed to enter or exit such regions abnormally this way,
1009 thus all computed gotos, non-local gotos and setjmp/longjmp calls
1010 must not transfer control across SESE region boundaries. */
1011 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
1013 gimple_stmt_iterator gsi
;
1014 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
1015 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
1016 int count
= n_basic_blocks_for_fn (cfun
);
1018 if (!bb_to_omp_idx
.is_empty ())
1019 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
1021 FOR_EACH_BB_FN (bb
, cfun
)
1023 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1025 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1031 target
= gimple_label_label (label_stmt
);
1033 /* Make an edge to every label block that has been marked as a
1034 potential target for a computed goto or a non-local goto. */
1035 if (FORCED_LABEL (target
))
1036 handle_abnormal_edges (dispatcher_bbs
, bb
, &ab_edge_goto
,
1038 if (DECL_NONLOCAL (target
))
1040 handle_abnormal_edges (dispatcher_bbs
, bb
, &ab_edge_call
,
1046 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
1047 gsi_next_nondebug (&gsi
);
1048 if (!gsi_end_p (gsi
))
1050 /* Make an edge to every setjmp-like call. */
1051 gimple
*call_stmt
= gsi_stmt (gsi
);
1052 if (is_gimple_call (call_stmt
)
1053 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
1054 || gimple_call_builtin_p (call_stmt
,
1055 BUILT_IN_SETJMP_RECEIVER
)))
1056 handle_abnormal_edges (dispatcher_bbs
, bb
, &ab_edge_call
,
1061 if (!bb_to_omp_idx
.is_empty ())
1062 XDELETE (dispatcher_bbs
);
1065 omp_free_regions ();
1068 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1069 needed. Returns true if new bbs were created.
1070 Note: This is transitional code, and should not be used for new code. We
1071 should be able to get rid of this by rewriting all target va-arg
1072 gimplification hooks to use an interface gimple_build_cond_value as described
1073 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1076 gimple_find_sub_bbs (gimple_seq seq
, gimple_stmt_iterator
*gsi
)
1078 gimple
*stmt
= gsi_stmt (*gsi
);
1079 basic_block bb
= gimple_bb (stmt
);
1080 basic_block lastbb
, afterbb
;
1081 int old_num_bbs
= n_basic_blocks_for_fn (cfun
);
1083 lastbb
= make_blocks_1 (seq
, bb
);
1084 if (old_num_bbs
== n_basic_blocks_for_fn (cfun
))
1086 e
= split_block (bb
, stmt
);
1087 /* Move e->dest to come after the new basic blocks. */
1089 unlink_block (afterbb
);
1090 link_block (afterbb
, lastbb
);
1091 redirect_edge_succ (e
, bb
->next_bb
);
1093 while (bb
!= afterbb
)
1095 struct omp_region
*cur_region
= NULL
;
1096 profile_count cnt
= profile_count::zero ();
1099 int cur_omp_region_idx
= 0;
1100 int mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
1101 gcc_assert (!mer
&& !cur_region
);
1102 add_bb_to_loop (bb
, afterbb
->loop_father
);
1106 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1108 if (e
->count ().initialized_p ())
1113 tree_guess_outgoing_edge_probabilities (bb
);
1114 if (all
|| profile_status_for_fn (cfun
) == PROFILE_READ
)
1122 /* Find the next available discriminator value for LOCUS. The
1123 discriminator distinguishes among several basic blocks that
1124 share a common locus, allowing for more accurate sample-based
1128 next_discriminator_for_locus (int line
)
1130 struct locus_discrim_map item
;
1131 struct locus_discrim_map
**slot
;
1133 item
.location_line
= line
;
1134 item
.discriminator
= 0;
1135 slot
= discriminator_per_locus
->find_slot_with_hash (&item
, line
, INSERT
);
1137 if (*slot
== HTAB_EMPTY_ENTRY
)
1139 *slot
= XNEW (struct locus_discrim_map
);
1141 (*slot
)->location_line
= line
;
1142 (*slot
)->discriminator
= 0;
1144 (*slot
)->discriminator
++;
1145 return (*slot
)->discriminator
;
1148 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1151 same_line_p (location_t locus1
, expanded_location
*from
, location_t locus2
)
1153 expanded_location to
;
1155 if (locus1
== locus2
)
1158 to
= expand_location (locus2
);
1160 if (from
->line
!= to
.line
)
1162 if (from
->file
== to
.file
)
1164 return (from
->file
!= NULL
1166 && filename_cmp (from
->file
, to
.file
) == 0);
1169 /* Assign a unique discriminator value to all statements in block bb that
1170 have the same line number as locus. */
1173 assign_discriminator (location_t locus
, basic_block bb
)
1175 gimple_stmt_iterator gsi
;
1178 if (locus
== UNKNOWN_LOCATION
)
1181 expanded_location locus_e
= expand_location (locus
);
1183 discriminator
= next_discriminator_for_locus (locus_e
.line
);
1185 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1187 gimple
*stmt
= gsi_stmt (gsi
);
1188 location_t stmt_locus
= gimple_location (stmt
);
1189 if (same_line_p (locus
, &locus_e
, stmt_locus
))
1190 gimple_set_location (stmt
,
1191 location_with_discriminator (stmt_locus
, discriminator
));
1195 /* Assign discriminators to statement locations. */
1198 assign_discriminators (void)
1202 FOR_EACH_BB_FN (bb
, cfun
)
1206 gimple_stmt_iterator gsi
;
1207 location_t curr_locus
= UNKNOWN_LOCATION
;
1208 expanded_location curr_locus_e
= {};
1211 /* Traverse the basic block, if two function calls within a basic block
1212 are mapped to the same line, assign a new discriminator because a call
1213 stmt could be a split point of a basic block. */
1214 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1216 gimple
*stmt
= gsi_stmt (gsi
);
1218 /* Don't allow debug stmts to affect discriminators, but
1219 allow them to take discriminators when they're on the
1220 same line as the preceding nondebug stmt. */
1221 if (is_gimple_debug (stmt
))
1223 if (curr_locus
!= UNKNOWN_LOCATION
1224 && same_line_p (curr_locus
, &curr_locus_e
,
1225 gimple_location (stmt
)))
1227 location_t loc
= gimple_location (stmt
);
1228 location_t dloc
= location_with_discriminator (loc
,
1230 gimple_set_location (stmt
, dloc
);
1234 if (curr_locus
== UNKNOWN_LOCATION
)
1236 curr_locus
= gimple_location (stmt
);
1237 curr_locus_e
= expand_location (curr_locus
);
1239 else if (!same_line_p (curr_locus
, &curr_locus_e
, gimple_location (stmt
)))
1241 curr_locus
= gimple_location (stmt
);
1242 curr_locus_e
= expand_location (curr_locus
);
1245 else if (curr_discr
!= 0)
1247 location_t loc
= gimple_location (stmt
);
1248 location_t dloc
= location_with_discriminator (loc
, curr_discr
);
1249 gimple_set_location (stmt
, dloc
);
1251 /* Allocate a new discriminator for CALL stmt. */
1252 if (gimple_code (stmt
) == GIMPLE_CALL
)
1253 curr_discr
= next_discriminator_for_locus (curr_locus
);
1256 gimple
*last
= last_nondebug_stmt (bb
);
1257 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
1258 if (locus
== UNKNOWN_LOCATION
)
1261 expanded_location locus_e
= expand_location (locus
);
1263 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1265 gimple
*first
= first_non_label_stmt (e
->dest
);
1266 gimple
*last
= last_nondebug_stmt (e
->dest
);
1268 gimple
*stmt_on_same_line
= NULL
;
1269 if (first
&& same_line_p (locus
, &locus_e
,
1270 gimple_location (first
)))
1271 stmt_on_same_line
= first
;
1272 else if (last
&& same_line_p (locus
, &locus_e
,
1273 gimple_location (last
)))
1274 stmt_on_same_line
= last
;
1276 if (stmt_on_same_line
)
1278 if (has_discriminator (gimple_location (stmt_on_same_line
))
1279 && !has_discriminator (locus
))
1280 assign_discriminator (locus
, bb
);
1282 assign_discriminator (locus
, e
->dest
);
1288 /* Create the edges for a GIMPLE_COND starting at block BB. */
1291 make_cond_expr_edges (basic_block bb
)
1293 gcond
*entry
= as_a
<gcond
*> (*gsi_last_bb (bb
));
1294 gimple
*then_stmt
, *else_stmt
;
1295 basic_block then_bb
, else_bb
;
1296 tree then_label
, else_label
;
1301 /* Entry basic blocks for each component. */
1302 then_label
= gimple_cond_true_label (entry
);
1303 else_label
= gimple_cond_false_label (entry
);
1304 then_bb
= label_to_block (cfun
, then_label
);
1305 else_bb
= label_to_block (cfun
, else_label
);
1306 then_stmt
= first_stmt (then_bb
);
1307 else_stmt
= first_stmt (else_bb
);
1309 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1310 e
->goto_locus
= gimple_location (then_stmt
);
1311 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1313 e
->goto_locus
= gimple_location (else_stmt
);
1315 /* We do not need the labels anymore. */
1316 gimple_cond_set_true_label (entry
, NULL_TREE
);
1317 gimple_cond_set_false_label (entry
, NULL_TREE
);
1321 /* Called for each element in the hash table (P) as we delete the
1322 edge to cases hash table.
1324 Clear all the CASE_CHAINs to prevent problems with copying of
1325 SWITCH_EXPRs and structure sharing rules, then free the hash table
1329 edge_to_cases_cleanup (edge
const &, tree
const &value
, void *)
1333 for (t
= value
; t
; t
= next
)
1335 next
= CASE_CHAIN (t
);
1336 CASE_CHAIN (t
) = NULL
;
1342 /* Start recording information mapping edges to case labels. */
1345 start_recording_case_labels (void)
1347 gcc_assert (edge_to_cases
== NULL
);
1348 edge_to_cases
= new hash_map
<edge
, tree
>;
1349 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
1352 /* Return nonzero if we are recording information for case labels. */
1355 recording_case_labels_p (void)
1357 return (edge_to_cases
!= NULL
);
1360 /* Stop recording information mapping edges to case labels and
1361 remove any information we have recorded. */
1363 end_recording_case_labels (void)
1367 edge_to_cases
->traverse
<void *, edge_to_cases_cleanup
> (NULL
);
1368 delete edge_to_cases
;
1369 edge_to_cases
= NULL
;
1370 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1372 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
1375 if (gswitch
*stmt
= safe_dyn_cast
<gswitch
*> (*gsi_last_bb (bb
)))
1376 group_case_labels_stmt (stmt
);
1379 BITMAP_FREE (touched_switch_bbs
);
1382 /* If we are inside a {start,end}_recording_cases block, then return
1383 a chain of CASE_LABEL_EXPRs from T which reference E.
1385 Otherwise return NULL. */
1388 get_cases_for_edge (edge e
, gswitch
*t
)
1393 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1394 chains available. Return NULL so the caller can detect this case. */
1395 if (!recording_case_labels_p ())
1398 slot
= edge_to_cases
->get (e
);
1402 /* If we did not find E in the hash table, then this must be the first
1403 time we have been queried for information about E & T. Add all the
1404 elements from T to the hash table then perform the query again. */
1406 n
= gimple_switch_num_labels (t
);
1407 for (i
= 0; i
< n
; i
++)
1409 tree elt
= gimple_switch_label (t
, i
);
1410 tree lab
= CASE_LABEL (elt
);
1411 basic_block label_bb
= label_to_block (cfun
, lab
);
1412 edge this_edge
= find_edge (e
->src
, label_bb
);
1414 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1416 tree
&s
= edge_to_cases
->get_or_insert (this_edge
);
1417 CASE_CHAIN (elt
) = s
;
1421 return *edge_to_cases
->get (e
);
1424 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1427 make_gimple_switch_edges (gswitch
*entry
, basic_block bb
)
1431 n
= gimple_switch_num_labels (entry
);
1433 for (i
= 0; i
< n
; ++i
)
1435 basic_block label_bb
= gimple_switch_label_bb (cfun
, entry
, i
);
1436 make_edge (bb
, label_bb
, 0);
1441 /* Return the basic block holding label DEST. */
1444 label_to_block (struct function
*ifun
, tree dest
)
1446 int uid
= LABEL_DECL_UID (dest
);
1448 /* We would die hard when faced by an undefined label. Emit a label to
1449 the very first basic block. This will hopefully make even the dataflow
1450 and undefined variable warnings quite right. */
1451 if (seen_error () && uid
< 0)
1453 gimple_stmt_iterator gsi
=
1454 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1457 stmt
= gimple_build_label (dest
);
1458 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1459 uid
= LABEL_DECL_UID (dest
);
1461 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1463 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1466 /* Create edges for a goto statement at block BB. Returns true
1467 if abnormal edges should be created. */
1470 make_goto_expr_edges (basic_block bb
)
1472 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1473 gimple
*goto_t
= gsi_stmt (last
);
1475 /* A simple GOTO creates normal edges. */
1476 if (simple_goto_p (goto_t
))
1478 tree dest
= gimple_goto_dest (goto_t
);
1479 basic_block label_bb
= label_to_block (cfun
, dest
);
1480 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1481 e
->goto_locus
= gimple_location (goto_t
);
1482 gsi_remove (&last
, true);
1486 /* A computed GOTO creates abnormal edges. */
1490 /* Create edges for an asm statement with labels at block BB. */
1493 make_gimple_asm_edges (basic_block bb
)
1495 gasm
*stmt
= as_a
<gasm
*> (*gsi_last_bb (bb
));
1496 int i
, n
= gimple_asm_nlabels (stmt
);
1498 for (i
= 0; i
< n
; ++i
)
1500 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1501 basic_block label_bb
= label_to_block (cfun
, label
);
1502 make_edge (bb
, label_bb
, 0);
1506 /*---------------------------------------------------------------------------
1508 ---------------------------------------------------------------------------*/
1510 /* Cleanup useless labels in basic blocks. This is something we wish
1511 to do early because it allows us to group case labels before creating
1512 the edges for the CFG, and it speeds up block statement iterators in
1513 all passes later on.
1514 We rerun this pass after CFG is created, to get rid of the labels that
1515 are no longer referenced. After then we do not run it any more, since
1516 (almost) no new labels should be created. */
1518 /* A map from basic block index to the leading label of that block. */
1524 /* True if the label is referenced from somewhere. */
1528 /* Given LABEL return the first label in the same basic block. */
1531 main_block_label (tree label
, label_record
*label_for_bb
)
1533 basic_block bb
= label_to_block (cfun
, label
);
1534 tree main_label
= label_for_bb
[bb
->index
].label
;
1536 /* label_to_block possibly inserted undefined label into the chain. */
1539 label_for_bb
[bb
->index
].label
= label
;
1543 label_for_bb
[bb
->index
].used
= true;
1547 /* Clean up redundant labels within the exception tree. */
1550 cleanup_dead_labels_eh (label_record
*label_for_bb
)
1557 if (cfun
->eh
== NULL
)
1560 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1561 if (lp
&& lp
->post_landing_pad
)
1563 lab
= main_block_label (lp
->post_landing_pad
, label_for_bb
);
1564 if (lab
!= lp
->post_landing_pad
)
1566 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1567 lp
->post_landing_pad
= lab
;
1568 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1572 FOR_ALL_EH_REGION (r
)
1576 case ERT_MUST_NOT_THROW
:
1582 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1586 c
->label
= main_block_label (lab
, label_for_bb
);
1591 case ERT_ALLOWED_EXCEPTIONS
:
1592 lab
= r
->u
.allowed
.label
;
1594 r
->u
.allowed
.label
= main_block_label (lab
, label_for_bb
);
1600 /* Cleanup redundant labels. This is a three-step process:
1601 1) Find the leading label for each block.
1602 2) Redirect all references to labels to the leading labels.
1603 3) Cleanup all useless labels. */
1606 cleanup_dead_labels (void)
1609 label_record
*label_for_bb
= XCNEWVEC (struct label_record
,
1610 last_basic_block_for_fn (cfun
));
1612 /* Find a suitable label for each block. We use the first user-defined
1613 label if there is one, or otherwise just the first label we see. */
1614 FOR_EACH_BB_FN (bb
, cfun
)
1616 gimple_stmt_iterator i
;
1618 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1621 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1626 label
= gimple_label_label (label_stmt
);
1628 /* If we have not yet seen a label for the current block,
1629 remember this one and see if there are more labels. */
1630 if (!label_for_bb
[bb
->index
].label
)
1632 label_for_bb
[bb
->index
].label
= label
;
1636 /* If we did see a label for the current block already, but it
1637 is an artificially created label, replace it if the current
1638 label is a user defined label. */
1639 if (!DECL_ARTIFICIAL (label
)
1640 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1642 label_for_bb
[bb
->index
].label
= label
;
1648 /* Now redirect all jumps/branches to the selected label.
1649 First do so for each block ending in a control statement. */
1650 FOR_EACH_BB_FN (bb
, cfun
)
1652 gimple
*stmt
= *gsi_last_bb (bb
);
1653 tree label
, new_label
;
1658 switch (gimple_code (stmt
))
1662 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
1663 label
= gimple_cond_true_label (cond_stmt
);
1666 new_label
= main_block_label (label
, label_for_bb
);
1667 if (new_label
!= label
)
1668 gimple_cond_set_true_label (cond_stmt
, new_label
);
1671 label
= gimple_cond_false_label (cond_stmt
);
1674 new_label
= main_block_label (label
, label_for_bb
);
1675 if (new_label
!= label
)
1676 gimple_cond_set_false_label (cond_stmt
, new_label
);
1683 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
1684 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
1686 /* Replace all destination labels. */
1687 for (i
= 0; i
< n
; ++i
)
1689 tree case_label
= gimple_switch_label (switch_stmt
, i
);
1690 label
= CASE_LABEL (case_label
);
1691 new_label
= main_block_label (label
, label_for_bb
);
1692 if (new_label
!= label
)
1693 CASE_LABEL (case_label
) = new_label
;
1700 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1701 int i
, n
= gimple_asm_nlabels (asm_stmt
);
1703 for (i
= 0; i
< n
; ++i
)
1705 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
1706 tree label
= main_block_label (TREE_VALUE (cons
), label_for_bb
);
1707 TREE_VALUE (cons
) = label
;
1712 /* We have to handle gotos until they're removed, and we don't
1713 remove them until after we've created the CFG edges. */
1715 if (!computed_goto_p (stmt
))
1717 ggoto
*goto_stmt
= as_a
<ggoto
*> (stmt
);
1718 label
= gimple_goto_dest (goto_stmt
);
1719 new_label
= main_block_label (label
, label_for_bb
);
1720 if (new_label
!= label
)
1721 gimple_goto_set_dest (goto_stmt
, new_label
);
1725 case GIMPLE_TRANSACTION
:
1727 gtransaction
*txn
= as_a
<gtransaction
*> (stmt
);
1729 label
= gimple_transaction_label_norm (txn
);
1732 new_label
= main_block_label (label
, label_for_bb
);
1733 if (new_label
!= label
)
1734 gimple_transaction_set_label_norm (txn
, new_label
);
1737 label
= gimple_transaction_label_uninst (txn
);
1740 new_label
= main_block_label (label
, label_for_bb
);
1741 if (new_label
!= label
)
1742 gimple_transaction_set_label_uninst (txn
, new_label
);
1745 label
= gimple_transaction_label_over (txn
);
1748 new_label
= main_block_label (label
, label_for_bb
);
1749 if (new_label
!= label
)
1750 gimple_transaction_set_label_over (txn
, new_label
);
1760 /* Do the same for the exception region tree labels. */
1761 cleanup_dead_labels_eh (label_for_bb
);
1763 /* Finally, purge dead labels. All user-defined labels and labels that
1764 can be the target of non-local gotos and labels which have their
1765 address taken are preserved. */
1766 FOR_EACH_BB_FN (bb
, cfun
)
1768 gimple_stmt_iterator i
;
1769 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1771 if (!label_for_this_bb
)
1774 /* If the main label of the block is unused, we may still remove it. */
1775 if (!label_for_bb
[bb
->index
].used
)
1776 label_for_this_bb
= NULL
;
1778 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1781 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1786 label
= gimple_label_label (label_stmt
);
1788 if (label
== label_for_this_bb
1789 || !DECL_ARTIFICIAL (label
)
1790 || DECL_NONLOCAL (label
)
1791 || FORCED_LABEL (label
))
1795 gcc_checking_assert (EH_LANDING_PAD_NR (label
) == 0);
1796 gsi_remove (&i
, true);
1801 free (label_for_bb
);
1804 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1805 the ones jumping to the same label.
1806 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1809 group_case_labels_stmt (gswitch
*stmt
)
1811 int old_size
= gimple_switch_num_labels (stmt
);
1812 int i
, next_index
, new_size
;
1813 basic_block default_bb
= NULL
;
1814 hash_set
<tree
> *removed_labels
= NULL
;
1816 default_bb
= gimple_switch_default_bb (cfun
, stmt
);
1818 /* Look for possible opportunities to merge cases. */
1820 while (i
< old_size
)
1822 tree base_case
, base_high
;
1823 basic_block base_bb
;
1825 base_case
= gimple_switch_label (stmt
, i
);
1827 gcc_assert (base_case
);
1828 base_bb
= label_to_block (cfun
, CASE_LABEL (base_case
));
1830 /* Discard cases that have the same destination as the default case or
1831 whose destination blocks have already been removed as unreachable. */
1833 || base_bb
== default_bb
1835 && removed_labels
->contains (CASE_LABEL (base_case
))))
1841 base_high
= CASE_HIGH (base_case
)
1842 ? CASE_HIGH (base_case
)
1843 : CASE_LOW (base_case
);
1846 /* Try to merge case labels. Break out when we reach the end
1847 of the label vector or when we cannot merge the next case
1848 label with the current one. */
1849 while (next_index
< old_size
)
1851 tree merge_case
= gimple_switch_label (stmt
, next_index
);
1852 basic_block merge_bb
= label_to_block (cfun
, CASE_LABEL (merge_case
));
1853 wide_int bhp1
= wi::to_wide (base_high
) + 1;
1855 /* Merge the cases if they jump to the same place,
1856 and their ranges are consecutive. */
1857 if (merge_bb
== base_bb
1858 && (removed_labels
== NULL
1859 || !removed_labels
->contains (CASE_LABEL (merge_case
)))
1860 && wi::to_wide (CASE_LOW (merge_case
)) == bhp1
)
1863 = (CASE_HIGH (merge_case
)
1864 ? CASE_HIGH (merge_case
) : CASE_LOW (merge_case
));
1865 CASE_HIGH (base_case
) = base_high
;
1872 /* Discard cases that have an unreachable destination block. */
1873 if (EDGE_COUNT (base_bb
->succs
) == 0
1874 && gimple_seq_unreachable_p (bb_seq (base_bb
))
1875 /* Don't optimize this if __builtin_unreachable () is the
1876 implicitly added one by the C++ FE too early, before
1877 -Wreturn-type can be diagnosed. We'll optimize it later
1878 during switchconv pass or any other cfg cleanup. */
1879 && (gimple_in_ssa_p (cfun
)
1880 || (LOCATION_LOCUS (gimple_location (last_nondebug_stmt (base_bb
)))
1881 != BUILTINS_LOCATION
)))
1883 edge base_edge
= find_edge (gimple_bb (stmt
), base_bb
);
1884 if (base_edge
!= NULL
)
1886 for (gimple_stmt_iterator gsi
= gsi_start_bb (base_bb
);
1887 !gsi_end_p (gsi
); gsi_next (&gsi
))
1888 if (glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
)))
1890 if (FORCED_LABEL (gimple_label_label (stmt
))
1891 || DECL_NONLOCAL (gimple_label_label (stmt
)))
1893 /* Forced/non-local labels aren't going to be removed,
1894 but they will be moved to some neighbouring basic
1895 block. If some later case label refers to one of
1896 those labels, we should throw that case away rather
1897 than keeping it around and refering to some random
1898 other basic block without an edge to it. */
1899 if (removed_labels
== NULL
)
1900 removed_labels
= new hash_set
<tree
>;
1901 removed_labels
->add (gimple_label_label (stmt
));
1906 remove_edge_and_dominated_blocks (base_edge
);
1913 gimple_switch_set_label (stmt
, new_size
,
1914 gimple_switch_label (stmt
, i
));
1919 gcc_assert (new_size
<= old_size
);
1921 if (new_size
< old_size
)
1922 gimple_switch_set_num_labels (stmt
, new_size
);
1924 delete removed_labels
;
1925 return new_size
< old_size
;
1928 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1929 and scan the sorted vector of cases. Combine the ones jumping to the
1933 group_case_labels (void)
1936 bool changed
= false;
1938 FOR_EACH_BB_FN (bb
, cfun
)
1940 if (gswitch
*stmt
= safe_dyn_cast
<gswitch
*> (*gsi_last_bb (bb
)))
1941 changed
|= group_case_labels_stmt (stmt
);
1947 /* Checks whether we can merge block B into block A. */
1950 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1954 if (!single_succ_p (a
))
1957 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1960 if (single_succ (a
) != b
)
1963 if (!single_pred_p (b
))
1966 if (a
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
1967 || b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1970 /* If A ends by a statement causing exceptions or something similar, we
1971 cannot merge the blocks. */
1972 stmt
= *gsi_last_bb (a
);
1973 if (stmt
&& stmt_ends_bb_p (stmt
))
1976 /* Examine the labels at the beginning of B. */
1977 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);
1981 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1984 lab
= gimple_label_label (label_stmt
);
1986 /* Do not remove user forced labels or for -O0 any user labels. */
1987 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1991 /* Protect simple loop latches. We only want to avoid merging
1992 the latch with the loop header or with a block in another
1993 loop in this case. */
1995 && b
->loop_father
->latch
== b
1996 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES
)
1997 && (b
->loop_father
->header
== a
1998 || b
->loop_father
!= a
->loop_father
))
2001 /* It must be possible to eliminate all phi nodes in B. If ssa form
2002 is not up-to-date and a name-mapping is registered, we cannot eliminate
2003 any phis. Symbols marked for renaming are never a problem though. */
2004 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);
2007 gphi
*phi
= gsi
.phi ();
2008 /* Technically only new names matter. */
2009 if (name_registered_for_update_p (PHI_RESULT (phi
)))
2013 /* When not optimizing, don't merge if we'd lose goto_locus. */
2015 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
2017 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
2018 gimple_stmt_iterator prev
, next
;
2019 prev
= gsi_last_nondebug_bb (a
);
2020 next
= gsi_after_labels (b
);
2021 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
2022 gsi_next_nondebug (&next
);
2023 if ((gsi_end_p (prev
)
2024 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
2025 && (gsi_end_p (next
)
2026 || gimple_location (gsi_stmt (next
)) != goto_locus
))
2033 /* Replaces all uses of NAME by VAL. */
2036 replace_uses_by (tree name
, tree val
)
2038 imm_use_iterator imm_iter
;
2043 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
2045 /* Mark the block if we change the last stmt in it. */
2046 if (cfgcleanup_altered_bbs
2047 && stmt_ends_bb_p (stmt
))
2048 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
2050 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
2052 replace_exp (use
, val
);
2054 if (gimple_code (stmt
) == GIMPLE_PHI
)
2056 e
= gimple_phi_arg_edge (as_a
<gphi
*> (stmt
),
2057 PHI_ARG_INDEX_FROM_USE (use
));
2058 if (e
->flags
& EDGE_ABNORMAL
2059 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
))
2061 /* This can only occur for virtual operands, since
2062 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
2063 would prevent replacement. */
2064 gcc_checking_assert (virtual_operand_p (name
));
2065 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
2070 if (gimple_code (stmt
) != GIMPLE_PHI
)
2072 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
2073 gimple
*orig_stmt
= stmt
;
2076 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2077 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2078 only change sth from non-invariant to invariant, and only
2079 when propagating constants. */
2080 if (is_gimple_min_invariant (val
))
2081 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
2083 tree op
= gimple_op (stmt
, i
);
2084 /* Operands may be empty here. For example, the labels
2085 of a GIMPLE_COND are nulled out following the creation
2086 of the corresponding CFG edges. */
2087 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
2088 recompute_tree_invariant_for_addr_expr (op
);
2091 if (fold_stmt (&gsi
))
2092 stmt
= gsi_stmt (gsi
);
2094 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
2095 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
2101 gcc_checking_assert (has_zero_uses (name
));
2103 /* Also update the trees stored in loop structures. */
2106 for (auto loop
: loops_list (cfun
, 0))
2107 substitute_in_loop_info (loop
, name
, val
);
2111 /* Merge block B into block A. */
2114 gimple_merge_blocks (basic_block a
, basic_block b
)
2116 gimple_stmt_iterator last
, gsi
;
2120 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
2122 /* Remove all single-valued PHI nodes from block B of the form
2123 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2124 gsi
= gsi_last_bb (a
);
2125 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
2127 gimple
*phi
= gsi_stmt (psi
);
2128 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
2130 bool may_replace_uses
= (virtual_operand_p (def
)
2131 || may_propagate_copy (def
, use
));
2133 /* In case we maintain loop closed ssa form, do not propagate arguments
2134 of loop exit phi nodes. */
2136 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
2137 && !virtual_operand_p (def
)
2138 && TREE_CODE (use
) == SSA_NAME
2139 && a
->loop_father
!= b
->loop_father
)
2140 may_replace_uses
= false;
2142 if (!may_replace_uses
)
2144 gcc_assert (!virtual_operand_p (def
));
2146 /* Note that just emitting the copies is fine -- there is no problem
2147 with ordering of phi nodes. This is because A is the single
2148 predecessor of B, therefore results of the phi nodes cannot
2149 appear as arguments of the phi nodes. */
2150 copy
= gimple_build_assign (def
, use
);
2151 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
2152 remove_phi_node (&psi
, false);
2156 /* If we deal with a PHI for virtual operands, we can simply
2157 propagate these without fussing with folding or updating
2159 if (virtual_operand_p (def
))
2161 imm_use_iterator iter
;
2162 use_operand_p use_p
;
2165 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
2166 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
2167 SET_USE (use_p
, use
);
2169 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
2170 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
2173 replace_uses_by (def
, use
);
2175 remove_phi_node (&psi
, true);
2179 /* Ensure that B follows A. */
2180 move_block_after (b
, a
);
2182 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
2183 gcc_assert (!*gsi_last_bb (a
)
2184 || !stmt_ends_bb_p (*gsi_last_bb (a
)));
2186 /* Remove labels from B and set gimple_bb to A for other statements. */
2187 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
2189 gimple
*stmt
= gsi_stmt (gsi
);
2190 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2192 tree label
= gimple_label_label (label_stmt
);
2195 gsi_remove (&gsi
, false);
2197 /* Now that we can thread computed gotos, we might have
2198 a situation where we have a forced label in block B
2199 However, the label at the start of block B might still be
2200 used in other ways (think about the runtime checking for
2201 Fortran assigned gotos). So we cannot just delete the
2202 label. Instead we move the label to the start of block A. */
2203 if (FORCED_LABEL (label
))
2205 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
2206 tree first_label
= NULL_TREE
;
2207 if (!gsi_end_p (dest_gsi
))
2208 if (glabel
*first_label_stmt
2209 = dyn_cast
<glabel
*> (gsi_stmt (dest_gsi
)))
2210 first_label
= gimple_label_label (first_label_stmt
);
2212 && (DECL_NONLOCAL (first_label
)
2213 || EH_LANDING_PAD_NR (first_label
) != 0))
2214 gsi_insert_after (&dest_gsi
, stmt
, GSI_NEW_STMT
);
2216 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
2218 /* Other user labels keep around in a form of a debug stmt. */
2219 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_BIND_STMTS
)
2221 gimple
*dbg
= gimple_build_debug_bind (label
,
2224 gimple_debug_bind_reset_value (dbg
);
2225 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
2228 lp_nr
= EH_LANDING_PAD_NR (label
);
2231 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
2232 lp
->post_landing_pad
= NULL
;
2237 gimple_set_bb (stmt
, a
);
2242 /* When merging two BBs, if their counts are different, the larger count
2243 is selected as the new bb count. This is to handle inconsistent
2245 if (a
->loop_father
== b
->loop_father
)
2247 a
->count
= a
->count
.merge (b
->count
);
2250 /* Merge the sequences. */
2251 last
= gsi_last_bb (a
);
2252 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
2253 set_bb_seq (b
, NULL
);
2255 if (cfgcleanup_altered_bbs
)
2256 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
2260 /* Return the one of two successors of BB that is not reachable by a
2261 complex edge, if there is one. Else, return BB. We use
2262 this in optimizations that use post-dominators for their heuristics,
2263 to catch the cases in C++ where function calls are involved. */
2266 single_noncomplex_succ (basic_block bb
)
2269 if (EDGE_COUNT (bb
->succs
) != 2)
2272 e0
= EDGE_SUCC (bb
, 0);
2273 e1
= EDGE_SUCC (bb
, 1);
2274 if (e0
->flags
& EDGE_COMPLEX
)
2276 if (e1
->flags
& EDGE_COMPLEX
)
2282 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2285 notice_special_calls (gcall
*call
)
2287 int flags
= gimple_call_flags (call
);
2289 if (flags
& ECF_MAY_BE_ALLOCA
)
2290 cfun
->calls_alloca
= true;
2291 if (flags
& ECF_RETURNS_TWICE
)
2292 cfun
->calls_setjmp
= true;
2296 /* Clear flags set by notice_special_calls. Used by dead code removal
2297 to update the flags. */
2300 clear_special_calls (void)
2302 cfun
->calls_alloca
= false;
2303 cfun
->calls_setjmp
= false;
2306 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2309 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
2311 /* Since this block is no longer reachable, we can just delete all
2312 of its PHI nodes. */
2313 remove_phi_nodes (bb
);
2315 /* Remove edges to BB's successors. */
2316 while (EDGE_COUNT (bb
->succs
) > 0)
2317 remove_edge (EDGE_SUCC (bb
, 0));
2321 /* Remove statements of basic block BB. */
2324 remove_bb (basic_block bb
)
2326 gimple_stmt_iterator i
;
2330 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
2331 if (dump_flags
& TDF_DETAILS
)
2333 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
2334 fprintf (dump_file
, "\n");
2340 class loop
*loop
= bb
->loop_father
;
2342 /* If a loop gets removed, clean up the information associated
2344 if (loop
->latch
== bb
2345 || loop
->header
== bb
)
2346 free_numbers_of_iterations_estimates (loop
);
2349 /* Remove all the instructions in the block. */
2350 if (bb_seq (bb
) != NULL
)
2352 /* Walk backwards so as to get a chance to substitute all
2353 released DEFs into debug stmts. See
2354 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
2356 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2358 gimple
*stmt
= gsi_stmt (i
);
2359 glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
);
2361 && (FORCED_LABEL (gimple_label_label (label_stmt
))
2362 || DECL_NONLOCAL (gimple_label_label (label_stmt
))))
2365 gimple_stmt_iterator new_gsi
;
2367 /* A non-reachable non-local label may still be referenced.
2368 But it no longer needs to carry the extra semantics of
2370 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
2372 DECL_NONLOCAL (gimple_label_label (label_stmt
)) = 0;
2373 FORCED_LABEL (gimple_label_label (label_stmt
)) = 1;
2376 new_bb
= bb
->prev_bb
;
2377 /* Don't move any labels into ENTRY block. */
2378 if (new_bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
2380 new_bb
= single_succ (new_bb
);
2381 gcc_assert (new_bb
!= bb
);
2383 if ((unsigned) bb
->index
< bb_to_omp_idx
.length ()
2384 && ((unsigned) new_bb
->index
>= bb_to_omp_idx
.length ()
2385 || (bb_to_omp_idx
[bb
->index
]
2386 != bb_to_omp_idx
[new_bb
->index
])))
2388 /* During cfg pass make sure to put orphaned labels
2389 into the right OMP region. */
2393 FOR_EACH_VEC_ELT (bb_to_omp_idx
, i
, idx
)
2394 if (i
>= NUM_FIXED_BLOCKS
2395 && idx
== bb_to_omp_idx
[bb
->index
]
2396 && i
!= (unsigned) bb
->index
)
2398 new_bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
2403 new_bb
= single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
2404 gcc_assert (new_bb
!= bb
);
2407 new_gsi
= gsi_after_labels (new_bb
);
2408 gsi_remove (&i
, false);
2409 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2413 /* Release SSA definitions. */
2414 release_defs (stmt
);
2415 gsi_remove (&i
, true);
2419 i
= gsi_last_bb (bb
);
2425 if ((unsigned) bb
->index
< bb_to_omp_idx
.length ())
2426 bb_to_omp_idx
[bb
->index
] = -1;
2427 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2428 bb
->il
.gimple
.seq
= NULL
;
2429 bb
->il
.gimple
.phi_nodes
= NULL
;
2433 /* Given a basic block BB and a value VAL for use in the final statement
2434 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2435 the edge that will be taken out of the block.
2436 If VAL is NULL_TREE, then the current value of the final statement's
2437 predicate or index is used.
2438 If the value does not match a unique edge, NULL is returned. */
2441 find_taken_edge (basic_block bb
, tree val
)
2445 stmt
= *gsi_last_bb (bb
);
2447 /* Handle ENTRY and EXIT. */
2451 else if (gimple_code (stmt
) == GIMPLE_COND
)
2452 return find_taken_edge_cond_expr (as_a
<gcond
*> (stmt
), val
);
2454 else if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2455 return find_taken_edge_switch_expr (as_a
<gswitch
*> (stmt
), val
);
2457 else if (computed_goto_p (stmt
))
2459 /* Only optimize if the argument is a label, if the argument is
2460 not a label then we cannot construct a proper CFG.
2462 It may be the case that we only need to allow the LABEL_REF to
2463 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2464 appear inside a LABEL_EXPR just to be safe. */
2466 && (TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2467 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2468 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2471 /* Otherwise we only know the taken successor edge if it's unique. */
2472 return single_succ_p (bb
) ? single_succ_edge (bb
) : NULL
;
2475 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2476 statement, determine which of the outgoing edges will be taken out of the
2477 block. Return NULL if either edge may be taken. */
2480 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2485 dest
= label_to_block (cfun
, val
);
2487 e
= find_edge (bb
, dest
);
2489 /* It's possible for find_edge to return NULL here on invalid code
2490 that abuses the labels-as-values extension (e.g. code that attempts to
2491 jump *between* functions via stored labels-as-values; PR 84136).
2492 If so, then we simply return that NULL for the edge.
2493 We don't currently have a way of detecting such invalid code, so we
2494 can't assert that it was the case when a NULL edge occurs here. */
2499 /* Given COND_STMT and a constant value VAL for use as the predicate,
2500 determine which of the two edges will be taken out of
2501 the statement's block. Return NULL if either edge may be taken.
2502 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2506 find_taken_edge_cond_expr (const gcond
*cond_stmt
, tree val
)
2508 edge true_edge
, false_edge
;
2510 if (val
== NULL_TREE
)
2512 /* Use the current value of the predicate. */
2513 if (gimple_cond_true_p (cond_stmt
))
2514 val
= integer_one_node
;
2515 else if (gimple_cond_false_p (cond_stmt
))
2516 val
= integer_zero_node
;
2520 else if (TREE_CODE (val
) != INTEGER_CST
)
2523 extract_true_false_edges_from_block (gimple_bb (cond_stmt
),
2524 &true_edge
, &false_edge
);
2526 return (integer_zerop (val
) ? false_edge
: true_edge
);
2529 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2530 which edge will be taken out of the statement's block. Return NULL if any
2532 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2536 find_taken_edge_switch_expr (const gswitch
*switch_stmt
, tree val
)
2538 basic_block dest_bb
;
2542 if (gimple_switch_num_labels (switch_stmt
) == 1)
2543 taken_case
= gimple_switch_default_label (switch_stmt
);
2546 if (val
== NULL_TREE
)
2547 val
= gimple_switch_index (switch_stmt
);
2548 if (TREE_CODE (val
) != INTEGER_CST
)
2551 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2553 dest_bb
= label_to_block (cfun
, CASE_LABEL (taken_case
));
2555 e
= find_edge (gimple_bb (switch_stmt
), dest_bb
);
2561 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2562 We can make optimal use here of the fact that the case labels are
2563 sorted: We can do a binary search for a case matching VAL. */
2566 find_case_label_for_value (const gswitch
*switch_stmt
, tree val
)
2568 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2569 tree default_case
= gimple_switch_default_label (switch_stmt
);
2571 for (low
= 0, high
= n
; high
- low
> 1; )
2573 size_t i
= (high
+ low
) / 2;
2574 tree t
= gimple_switch_label (switch_stmt
, i
);
2577 /* Cache the result of comparing CASE_LOW and val. */
2578 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2585 if (CASE_HIGH (t
) == NULL
)
2587 /* A singe-valued case label. */
2593 /* A case range. We can only handle integer ranges. */
2594 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2599 return default_case
;
2603 /* Dump a basic block on stderr. */
2606 gimple_debug_bb (basic_block bb
)
2608 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2612 /* Dump basic block with index N on stderr. */
2615 gimple_debug_bb_n (int n
)
2617 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2618 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2622 /* Dump the CFG on stderr.
2624 FLAGS are the same used by the tree dumping functions
2625 (see TDF_* in dumpfile.h). */
2628 gimple_debug_cfg (dump_flags_t flags
)
2630 gimple_dump_cfg (stderr
, flags
);
2634 /* Dump the program showing basic block boundaries on the given FILE.
2636 FLAGS are the same used by the tree dumping functions (see TDF_* in
2640 gimple_dump_cfg (FILE *file
, dump_flags_t flags
)
2642 if (flags
& TDF_DETAILS
)
2644 dump_function_header (file
, current_function_decl
, flags
);
2645 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2646 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2647 last_basic_block_for_fn (cfun
));
2649 brief_dump_cfg (file
, flags
);
2650 fprintf (file
, "\n");
2653 if (flags
& TDF_STATS
)
2654 dump_cfg_stats (file
);
2656 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2660 /* Dump CFG statistics on FILE. */
2663 dump_cfg_stats (FILE *file
)
2665 static long max_num_merged_labels
= 0;
2666 unsigned long size
, total
= 0;
2669 const char * const fmt_str
= "%-30s%-13s%12s\n";
2670 const char * const fmt_str_1
= "%-30s%13d" PRsa (11) "\n";
2671 const char * const fmt_str_2
= "%-30s%13ld" PRsa (11) "\n";
2672 const char * const fmt_str_3
= "%-43s" PRsa (11) "\n";
2673 const char *funcname
= current_function_name ();
2675 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2677 fprintf (file
, "---------------------------------------------------------\n");
2678 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2679 fprintf (file
, fmt_str
, "", " instances ", "used ");
2680 fprintf (file
, "---------------------------------------------------------\n");
2682 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2684 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2685 SIZE_AMOUNT (size
));
2688 FOR_EACH_BB_FN (bb
, cfun
)
2689 num_edges
+= EDGE_COUNT (bb
->succs
);
2690 size
= num_edges
* sizeof (class edge_def
);
2692 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SIZE_AMOUNT (size
));
2694 fprintf (file
, "---------------------------------------------------------\n");
2695 fprintf (file
, fmt_str_3
, "Total memory used by CFG data",
2696 SIZE_AMOUNT (total
));
2697 fprintf (file
, "---------------------------------------------------------\n");
2698 fprintf (file
, "\n");
2700 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2701 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2703 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2704 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2706 fprintf (file
, "\n");
2710 /* Dump CFG statistics on stderr. Keep extern so that it's always
2711 linked in the final executable. */
2714 debug_cfg_stats (void)
2716 dump_cfg_stats (stderr
);
2719 /*---------------------------------------------------------------------------
2720 Miscellaneous helpers
2721 ---------------------------------------------------------------------------*/
2723 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2724 flow. Transfers of control flow associated with EH are excluded. */
2727 call_can_make_abnormal_goto (gimple
*t
)
2729 /* If the function has no non-local labels, then a call cannot make an
2730 abnormal transfer of control. */
2731 if (!cfun
->has_nonlocal_label
2732 && !cfun
->calls_setjmp
)
2735 /* Likewise if the call has no side effects. */
2736 if (!gimple_has_side_effects (t
))
2739 /* Likewise if the called function is leaf. */
2740 if (gimple_call_flags (t
) & ECF_LEAF
)
2747 /* Return true if T can make an abnormal transfer of control flow.
2748 Transfers of control flow associated with EH are excluded. */
2751 stmt_can_make_abnormal_goto (gimple
*t
)
2753 if (computed_goto_p (t
))
2755 if (is_gimple_call (t
))
2756 return call_can_make_abnormal_goto (t
);
2761 /* Return true if T represents a stmt that always transfers control. */
2764 is_ctrl_stmt (gimple
*t
)
2766 switch (gimple_code (t
))
2780 /* Return true if T is a statement that may alter the flow of control
2781 (e.g., a call to a non-returning function). */
2784 is_ctrl_altering_stmt (gimple
*t
)
2788 switch (gimple_code (t
))
2791 /* Per stmt call flag indicates whether the call could alter
2793 if (gimple_call_ctrl_altering_p (t
))
2797 case GIMPLE_EH_DISPATCH
:
2798 /* EH_DISPATCH branches to the individual catch handlers at
2799 this level of a try or allowed-exceptions region. It can
2800 fallthru to the next statement as well. */
2804 if (gimple_asm_nlabels (as_a
<gasm
*> (t
)) > 0)
2809 /* OpenMP directives alter control flow. */
2812 case GIMPLE_TRANSACTION
:
2813 /* A transaction start alters control flow. */
2820 /* If a statement can throw, it alters control flow. */
2821 return stmt_can_throw_internal (cfun
, t
);
2825 /* Return true if T is a simple local goto. */
2828 simple_goto_p (gimple
*t
)
2830 return (gimple_code (t
) == GIMPLE_GOTO
2831 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2835 /* Return true if STMT should start a new basic block. PREV_STMT is
2836 the statement preceding STMT. It is used when STMT is a label or a
2837 case label. Labels should only start a new basic block if their
2838 previous statement wasn't a label. Otherwise, sequence of labels
2839 would generate unnecessary basic blocks that only contain a single
2843 stmt_starts_bb_p (gimple
*stmt
, gimple
*prev_stmt
)
2848 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2849 any nondebug stmts in the block. We don't want to start another
2850 block in this case: the debug stmt will already have started the
2851 one STMT would start if we weren't outputting debug stmts. */
2852 if (prev_stmt
&& is_gimple_debug (prev_stmt
))
2855 /* Labels start a new basic block only if the preceding statement
2856 wasn't a label of the same type. This prevents the creation of
2857 consecutive blocks that have nothing but a single label. */
2858 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2860 /* Nonlocal and computed GOTO targets always start a new block. */
2861 if (DECL_NONLOCAL (gimple_label_label (label_stmt
))
2862 || FORCED_LABEL (gimple_label_label (label_stmt
)))
2865 if (glabel
*plabel
= safe_dyn_cast
<glabel
*> (prev_stmt
))
2867 if (DECL_NONLOCAL (gimple_label_label (plabel
))
2868 || !DECL_ARTIFICIAL (gimple_label_label (plabel
)))
2871 cfg_stats
.num_merged_labels
++;
2877 else if (gimple_code (stmt
) == GIMPLE_CALL
)
2879 if (gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2880 /* setjmp acts similar to a nonlocal GOTO target and thus should
2881 start a new block. */
2883 if (gimple_call_internal_p (stmt
, IFN_PHI
)
2885 && gimple_code (prev_stmt
) != GIMPLE_LABEL
2886 && (gimple_code (prev_stmt
) != GIMPLE_CALL
2887 || ! gimple_call_internal_p (prev_stmt
, IFN_PHI
)))
2888 /* PHI nodes start a new block unless preceeded by a label
2897 /* Return true if T should end a basic block. */
2900 stmt_ends_bb_p (gimple
*t
)
2902 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2905 /* Remove block annotations and other data structures. */
2908 delete_tree_cfg_annotations (struct function
*fn
)
2910 vec_free (label_to_block_map_for_fn (fn
));
2913 /* Return the virtual phi in BB. */
2916 get_virtual_phi (basic_block bb
)
2918 for (gphi_iterator gsi
= gsi_start_phis (bb
);
2922 gphi
*phi
= gsi
.phi ();
2924 if (virtual_operand_p (PHI_RESULT (phi
)))
2931 /* Return the first statement in basic block BB. */
2934 first_stmt (basic_block bb
)
2936 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2937 gimple
*stmt
= NULL
;
2939 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2947 /* Return the first non-label statement in basic block BB. */
2950 first_non_label_stmt (basic_block bb
)
2952 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2953 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2955 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2958 /* Return the last statement in basic block BB. */
2961 last_nondebug_stmt (basic_block bb
)
2963 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2964 gimple
*stmt
= NULL
;
2966 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2974 /* Return the last statement of an otherwise empty block. Return NULL
2975 if the block is totally empty, or if it contains more than one
2979 last_and_only_stmt (basic_block bb
)
2981 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2982 gimple
*last
, *prev
;
2987 last
= gsi_stmt (i
);
2988 gsi_prev_nondebug (&i
);
2992 /* Empty statements should no longer appear in the instruction stream.
2993 Everything that might have appeared before should be deleted by
2994 remove_useless_stmts, and the optimizers should just gsi_remove
2995 instead of smashing with build_empty_stmt.
2997 Thus the only thing that should appear here in a block containing
2998 one executable statement is a label. */
2999 prev
= gsi_stmt (i
);
3000 if (gimple_code (prev
) == GIMPLE_LABEL
)
3006 /* Returns the basic block after which the new basic block created
3007 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3008 near its "logical" location. This is of most help to humans looking
3009 at debugging dumps. */
3012 split_edge_bb_loc (edge edge_in
)
3014 basic_block dest
= edge_in
->dest
;
3015 basic_block dest_prev
= dest
->prev_bb
;
3019 edge e
= find_edge (dest_prev
, dest
);
3020 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
3021 return edge_in
->src
;
3026 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3027 Abort on abnormal edges. */
3030 gimple_split_edge (edge edge_in
)
3032 basic_block new_bb
, after_bb
, dest
;
3035 /* Abnormal edges cannot be split. */
3036 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
3038 dest
= edge_in
->dest
;
3040 after_bb
= split_edge_bb_loc (edge_in
);
3042 new_bb
= create_empty_bb (after_bb
);
3043 new_bb
->count
= edge_in
->count ();
3045 /* We want to avoid re-allocating PHIs when we first
3046 add the fallthru edge from new_bb to dest but we also
3047 want to avoid changing PHI argument order when
3048 first redirecting edge_in away from dest. The former
3049 avoids changing PHI argument order by adding them
3050 last and then the redirection swapping it back into
3051 place by means of unordered remove.
3052 So hack around things by temporarily removing all PHIs
3053 from the destination during the edge redirection and then
3054 making sure the edges stay in order. */
3055 gimple_seq saved_phis
= phi_nodes (dest
);
3056 unsigned old_dest_idx
= edge_in
->dest_idx
;
3057 set_phi_nodes (dest
, NULL
);
3058 new_edge
= make_single_succ_edge (new_bb
, dest
, EDGE_FALLTHRU
);
3059 e
= redirect_edge_and_branch (edge_in
, new_bb
);
3060 gcc_assert (e
== edge_in
&& new_edge
->dest_idx
== old_dest_idx
);
3061 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here. */
3062 dest
->il
.gimple
.phi_nodes
= saved_phis
;
3068 /* Verify properties of the address expression T whose base should be
3069 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
3072 verify_address (tree t
, bool verify_addressable
)
3075 bool old_side_effects
;
3077 bool new_side_effects
;
3079 old_constant
= TREE_CONSTANT (t
);
3080 old_side_effects
= TREE_SIDE_EFFECTS (t
);
3082 recompute_tree_invariant_for_addr_expr (t
);
3083 new_side_effects
= TREE_SIDE_EFFECTS (t
);
3084 new_constant
= TREE_CONSTANT (t
);
3086 if (old_constant
!= new_constant
)
3088 error ("constant not recomputed when %<ADDR_EXPR%> changed");
3091 if (old_side_effects
!= new_side_effects
)
3093 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3097 tree base
= TREE_OPERAND (t
, 0);
3098 while (handled_component_p (base
))
3099 base
= TREE_OPERAND (base
, 0);
3102 || TREE_CODE (base
) == PARM_DECL
3103 || TREE_CODE (base
) == RESULT_DECL
))
3106 if (verify_addressable
&& !TREE_ADDRESSABLE (base
))
3108 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3116 /* Verify if EXPR is a valid GIMPLE reference expression. If
3117 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3118 if there is an error, otherwise false. */
3121 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
3123 const char *code_name
= get_tree_code_name (TREE_CODE (expr
));
3125 if (TREE_CODE (expr
) == REALPART_EXPR
3126 || TREE_CODE (expr
) == IMAGPART_EXPR
3127 || TREE_CODE (expr
) == BIT_FIELD_REF
3128 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3130 tree op
= TREE_OPERAND (expr
, 0);
3131 if (TREE_CODE (expr
) != VIEW_CONVERT_EXPR
3132 && !is_gimple_reg_type (TREE_TYPE (expr
)))
3134 error ("non-scalar %qs", code_name
);
3138 if (TREE_CODE (expr
) == BIT_FIELD_REF
)
3140 tree t1
= TREE_OPERAND (expr
, 1);
3141 tree t2
= TREE_OPERAND (expr
, 2);
3142 poly_uint64 size
, bitpos
;
3143 if (!poly_int_tree_p (t1
, &size
)
3144 || !poly_int_tree_p (t2
, &bitpos
)
3145 || !types_compatible_p (bitsizetype
, TREE_TYPE (t1
))
3146 || !types_compatible_p (bitsizetype
, TREE_TYPE (t2
)))
3148 error ("invalid position or size operand to %qs", code_name
);
3151 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
))
3152 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr
)), size
))
3154 error ("integral result type precision does not match "
3155 "field size of %qs", code_name
);
3158 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
3159 && TYPE_MODE (TREE_TYPE (expr
)) != BLKmode
3160 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr
))),
3163 error ("mode size of non-integral result does not "
3164 "match field size of %qs",
3168 if (INTEGRAL_TYPE_P (TREE_TYPE (op
))
3169 && !type_has_mode_precision_p (TREE_TYPE (op
)))
3171 error ("%qs of non-mode-precision operand", code_name
);
3174 if (!AGGREGATE_TYPE_P (TREE_TYPE (op
))
3175 && maybe_gt (size
+ bitpos
,
3176 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op
)))))
3178 error ("position plus size exceeds size of referenced object in "
3184 if ((TREE_CODE (expr
) == REALPART_EXPR
3185 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3186 && !useless_type_conversion_p (TREE_TYPE (expr
),
3187 TREE_TYPE (TREE_TYPE (op
))))
3189 error ("type mismatch in %qs reference", code_name
);
3190 debug_generic_stmt (TREE_TYPE (expr
));
3191 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3195 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3197 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3198 that their operand is not a register an invariant when
3199 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3200 bug). Otherwise there is nothing to verify, gross mismatches at
3201 most invoke undefined behavior. */
3203 && (is_gimple_reg (op
) || is_gimple_min_invariant (op
)))
3205 error ("conversion of %qs on the left hand side of %qs",
3206 get_tree_code_name (TREE_CODE (op
)), code_name
);
3207 debug_generic_stmt (expr
);
3210 else if (is_gimple_reg (op
)
3211 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3213 error ("conversion of register to a different size in %qs",
3215 debug_generic_stmt (expr
);
3223 bool require_non_reg
= false;
3224 while (handled_component_p (expr
))
3226 require_non_reg
= true;
3227 code_name
= get_tree_code_name (TREE_CODE (expr
));
3229 if (TREE_CODE (expr
) == REALPART_EXPR
3230 || TREE_CODE (expr
) == IMAGPART_EXPR
3231 || TREE_CODE (expr
) == BIT_FIELD_REF
)
3233 error ("non-top-level %qs", code_name
);
3237 tree op
= TREE_OPERAND (expr
, 0);
3239 if (TREE_CODE (expr
) == ARRAY_REF
3240 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3242 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3243 || (TREE_OPERAND (expr
, 2)
3244 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3245 || (TREE_OPERAND (expr
, 3)
3246 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3248 error ("invalid operands to %qs", code_name
);
3249 debug_generic_stmt (expr
);
3254 /* Verify if the reference array element types are compatible. */
3255 if (TREE_CODE (expr
) == ARRAY_REF
3256 && !useless_type_conversion_p (TREE_TYPE (expr
),
3257 TREE_TYPE (TREE_TYPE (op
))))
3259 error ("type mismatch in %qs", code_name
);
3260 debug_generic_stmt (TREE_TYPE (expr
));
3261 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3264 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3265 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3266 TREE_TYPE (TREE_TYPE (op
))))
3268 error ("type mismatch in %qs", code_name
);
3269 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3270 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3274 if (TREE_CODE (expr
) == COMPONENT_REF
)
3276 if (TREE_OPERAND (expr
, 2)
3277 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3279 error ("invalid %qs offset operator", code_name
);
3282 if (!useless_type_conversion_p (TREE_TYPE (expr
),
3283 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3285 error ("type mismatch in %qs", code_name
);
3286 debug_generic_stmt (TREE_TYPE (expr
));
3287 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3295 code_name
= get_tree_code_name (TREE_CODE (expr
));
3297 if (TREE_CODE (expr
) == MEM_REF
)
3299 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0))
3300 || (TREE_CODE (TREE_OPERAND (expr
, 0)) == ADDR_EXPR
3301 && verify_address (TREE_OPERAND (expr
, 0), false)))
3303 error ("invalid address operand in %qs", code_name
);
3304 debug_generic_stmt (expr
);
3307 if (!poly_int_tree_p (TREE_OPERAND (expr
, 1))
3308 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3310 error ("invalid offset operand in %qs", code_name
);
3311 debug_generic_stmt (expr
);
3314 if (MR_DEPENDENCE_CLIQUE (expr
) != 0
3315 && MR_DEPENDENCE_CLIQUE (expr
) > cfun
->last_clique
)
3317 error ("invalid clique in %qs", code_name
);
3318 debug_generic_stmt (expr
);
3322 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3324 if (!TMR_BASE (expr
)
3325 || !is_gimple_mem_ref_addr (TMR_BASE (expr
))
3326 || (TREE_CODE (TMR_BASE (expr
)) == ADDR_EXPR
3327 && verify_address (TMR_BASE (expr
), false)))
3329 error ("invalid address operand in %qs", code_name
);
3332 if (!TMR_OFFSET (expr
)
3333 || !poly_int_tree_p (TMR_OFFSET (expr
))
3334 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3336 error ("invalid offset operand in %qs", code_name
);
3337 debug_generic_stmt (expr
);
3340 if (MR_DEPENDENCE_CLIQUE (expr
) != 0
3341 && MR_DEPENDENCE_CLIQUE (expr
) > cfun
->last_clique
)
3343 error ("invalid clique in %qs", code_name
);
3344 debug_generic_stmt (expr
);
3348 else if (INDIRECT_REF_P (expr
))
3350 error ("%qs in gimple IL", code_name
);
3351 debug_generic_stmt (expr
);
3354 else if (require_non_reg
3355 && (is_gimple_reg (expr
)
3356 || (is_gimple_min_invariant (expr
)
3357 /* STRING_CSTs are representatives of the string table
3358 entry which lives in memory. */
3359 && TREE_CODE (expr
) != STRING_CST
)))
3361 error ("%qs as base where non-register is required", code_name
);
3362 debug_generic_stmt (expr
);
3367 && (is_gimple_reg (expr
) || is_gimple_min_invariant (expr
)))
3370 if (TREE_CODE (expr
) != SSA_NAME
&& is_gimple_id (expr
))
3373 if (TREE_CODE (expr
) != TARGET_MEM_REF
3374 && TREE_CODE (expr
) != MEM_REF
)
3376 error ("invalid expression for min lvalue");
3383 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3384 list of pointer-to types that is trivially convertible to DEST. */
3387 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3391 if (!TYPE_POINTER_TO (src_obj
))
3394 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3395 if (useless_type_conversion_p (dest
, src
))
3401 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3402 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3405 valid_fixed_convert_types_p (tree type1
, tree type2
)
3407 return (FIXED_POINT_TYPE_P (type1
)
3408 && (INTEGRAL_TYPE_P (type2
)
3409 || SCALAR_FLOAT_TYPE_P (type2
)
3410 || FIXED_POINT_TYPE_P (type2
)));
3413 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3414 is a problem, otherwise false. */
3417 verify_gimple_call (gcall
*stmt
)
3419 tree fn
= gimple_call_fn (stmt
);
3420 tree fntype
, fndecl
;
3423 if (gimple_call_internal_p (stmt
))
3427 error ("gimple call has two targets");
3428 debug_generic_stmt (fn
);
3436 error ("gimple call has no target");
3441 if (fn
&& !is_gimple_call_addr (fn
))
3443 error ("invalid function in gimple call");
3444 debug_generic_stmt (fn
);
3449 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3450 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3451 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3453 error ("non-function in gimple call");
3457 fndecl
= gimple_call_fndecl (stmt
);
3459 && TREE_CODE (fndecl
) == FUNCTION_DECL
3460 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3461 && !DECL_PURE_P (fndecl
)
3462 && !TREE_READONLY (fndecl
))
3464 error ("invalid pure const state for function");
3468 tree lhs
= gimple_call_lhs (stmt
);
3470 && (!is_gimple_reg (lhs
)
3471 && (!is_gimple_lvalue (lhs
)
3472 || verify_types_in_gimple_reference
3473 (TREE_CODE (lhs
) == WITH_SIZE_EXPR
3474 ? TREE_OPERAND (lhs
, 0) : lhs
, true))))
3476 error ("invalid LHS in gimple call");
3480 if (gimple_call_ctrl_altering_p (stmt
)
3481 && gimple_call_noreturn_p (stmt
)
3482 && should_remove_lhs_p (lhs
))
3484 error ("LHS in %<noreturn%> call");
3488 fntype
= gimple_call_fntype (stmt
);
3491 && !useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (fntype
))
3492 /* ??? At least C++ misses conversions at assignments from
3493 void * call results.
3494 For now simply allow arbitrary pointer type conversions. */
3495 && !(POINTER_TYPE_P (TREE_TYPE (lhs
))
3496 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3498 error ("invalid conversion in gimple call");
3499 debug_generic_stmt (TREE_TYPE (lhs
));
3500 debug_generic_stmt (TREE_TYPE (fntype
));
3504 if (gimple_call_chain (stmt
)
3505 && !is_gimple_val (gimple_call_chain (stmt
)))
3507 error ("invalid static chain in gimple call");
3508 debug_generic_stmt (gimple_call_chain (stmt
));
3512 /* If there is a static chain argument, the call should either be
3513 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3514 if (gimple_call_chain (stmt
)
3516 && !DECL_STATIC_CHAIN (fndecl
))
3518 error ("static chain with function that doesn%'t use one");
3522 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3524 switch (DECL_FUNCTION_CODE (fndecl
))
3526 case BUILT_IN_UNREACHABLE
:
3527 case BUILT_IN_UNREACHABLE_TRAP
:
3529 if (gimple_call_num_args (stmt
) > 0)
3531 /* Built-in unreachable with parameters might not be caught by
3532 undefined behavior sanitizer. Front-ends do check users do not
3533 call them that way but we also produce calls to
3534 __builtin_unreachable internally, for example when IPA figures
3535 out a call cannot happen in a legal program. In such cases,
3536 we must make sure arguments are stripped off. */
3537 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3547 /* For a call to .DEFERRED_INIT,
3548 LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL)
3549 we should guarantee that when the 1st argument is a constant, it should
3550 be the same as the size of the LHS. */
3552 if (gimple_call_internal_p (stmt
, IFN_DEFERRED_INIT
))
3554 tree size_of_arg0
= gimple_call_arg (stmt
, 0);
3555 tree size_of_lhs
= TYPE_SIZE_UNIT (TREE_TYPE (lhs
));
3557 if (TREE_CODE (lhs
) == SSA_NAME
)
3558 lhs
= SSA_NAME_VAR (lhs
);
3560 poly_uint64 size_from_arg0
, size_from_lhs
;
3561 bool is_constant_size_arg0
= poly_int_tree_p (size_of_arg0
,
3563 bool is_constant_size_lhs
= poly_int_tree_p (size_of_lhs
,
3565 if (is_constant_size_arg0
&& is_constant_size_lhs
)
3566 if (maybe_ne (size_from_arg0
, size_from_lhs
))
3568 error ("%<DEFERRED_INIT%> calls should have same "
3569 "constant size for the first argument and LHS");
3574 /* ??? The C frontend passes unpromoted arguments in case it
3575 didn't see a function declaration before the call. So for now
3576 leave the call arguments mostly unverified. Once we gimplify
3577 unit-at-a-time we have a chance to fix this. */
3578 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3580 tree arg
= gimple_call_arg (stmt
, i
);
3581 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3582 && !is_gimple_val (arg
))
3583 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3584 && !is_gimple_lvalue (arg
)))
3586 error ("invalid argument to gimple call");
3587 debug_generic_expr (arg
);
3590 if (!is_gimple_reg (arg
))
3592 if (TREE_CODE (arg
) == WITH_SIZE_EXPR
)
3593 arg
= TREE_OPERAND (arg
, 0);
3594 if (verify_types_in_gimple_reference (arg
, false))
3602 /* Verifies the gimple comparison with the result type TYPE and
3603 the operands OP0 and OP1, comparison code is CODE. */
3606 verify_gimple_comparison (tree type
, tree op0
, tree op1
, enum tree_code code
)
3608 tree op0_type
= TREE_TYPE (op0
);
3609 tree op1_type
= TREE_TYPE (op1
);
3611 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3613 error ("invalid operands in gimple comparison");
3617 /* For comparisons we do not have the operations type as the
3618 effective type the comparison is carried out in. Instead
3619 we require that either the first operand is trivially
3620 convertible into the second, or the other way around. */
3621 if (!useless_type_conversion_p (op0_type
, op1_type
)
3622 && !useless_type_conversion_p (op1_type
, op0_type
))
3624 error ("mismatching comparison operand types");
3625 debug_generic_expr (op0_type
);
3626 debug_generic_expr (op1_type
);
3630 /* The resulting type of a comparison may be an effective boolean type. */
3631 if (INTEGRAL_TYPE_P (type
)
3632 && (TREE_CODE (type
) == BOOLEAN_TYPE
3633 || TYPE_PRECISION (type
) == 1))
3635 if ((VECTOR_TYPE_P (op0_type
)
3636 || VECTOR_TYPE_P (op1_type
))
3637 && code
!= EQ_EXPR
&& code
!= NE_EXPR
3638 && !VECTOR_BOOLEAN_TYPE_P (op0_type
)
3639 && !VECTOR_INTEGER_TYPE_P (op0_type
))
3641 error ("unsupported operation or type for vector comparison"
3642 " returning a boolean");
3643 debug_generic_expr (op0_type
);
3644 debug_generic_expr (op1_type
);
3648 /* Or a boolean vector type with the same element count
3649 as the comparison operand types. */
3650 else if (VECTOR_TYPE_P (type
)
3651 && TREE_CODE (TREE_TYPE (type
)) == BOOLEAN_TYPE
)
3653 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3654 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3656 error ("non-vector operands in vector comparison");
3657 debug_generic_expr (op0_type
);
3658 debug_generic_expr (op1_type
);
3662 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type
),
3663 TYPE_VECTOR_SUBPARTS (op0_type
)))
3665 error ("invalid vector comparison resulting type");
3666 debug_generic_expr (type
);
3672 error ("bogus comparison result type");
3673 debug_generic_expr (type
);
3680 /* Verify a gimple assignment statement STMT with an unary rhs.
3681 Returns true if anything is wrong. */
3684 verify_gimple_assign_unary (gassign
*stmt
)
3686 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3687 tree lhs
= gimple_assign_lhs (stmt
);
3688 tree lhs_type
= TREE_TYPE (lhs
);
3689 tree rhs1
= gimple_assign_rhs1 (stmt
);
3690 tree rhs1_type
= TREE_TYPE (rhs1
);
3692 if (!is_gimple_reg (lhs
))
3694 error ("non-register as LHS of unary operation");
3698 if (!is_gimple_val (rhs1
))
3700 error ("invalid operand in unary operation");
3704 const char* const code_name
= get_tree_code_name (rhs_code
);
3706 /* First handle conversions. */
3711 /* Allow conversions between vectors with the same number of elements,
3712 provided that the conversion is OK for the element types too. */
3713 if (VECTOR_TYPE_P (lhs_type
)
3714 && VECTOR_TYPE_P (rhs1_type
)
3715 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type
),
3716 TYPE_VECTOR_SUBPARTS (rhs1_type
)))
3718 lhs_type
= TREE_TYPE (lhs_type
);
3719 rhs1_type
= TREE_TYPE (rhs1_type
);
3721 else if (VECTOR_TYPE_P (lhs_type
) || VECTOR_TYPE_P (rhs1_type
))
3723 error ("invalid vector types in nop conversion");
3724 debug_generic_expr (lhs_type
);
3725 debug_generic_expr (rhs1_type
);
3729 /* Allow conversions from pointer type to integral type only if
3730 there is no sign or zero extension involved.
3731 For targets were the precision of ptrofftype doesn't match that
3732 of pointers we allow conversions to types where
3733 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3734 if ((POINTER_TYPE_P (lhs_type
)
3735 && INTEGRAL_TYPE_P (rhs1_type
))
3736 || (POINTER_TYPE_P (rhs1_type
)
3737 && INTEGRAL_TYPE_P (lhs_type
)
3738 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3739 #if defined(POINTERS_EXTEND_UNSIGNED)
3740 || (TYPE_MODE (rhs1_type
) == ptr_mode
3741 && (TYPE_PRECISION (lhs_type
)
3742 == BITS_PER_WORD
/* word_mode */
3743 || (TYPE_PRECISION (lhs_type
)
3744 == GET_MODE_PRECISION (Pmode
))))
3749 /* Allow conversion from integral to offset type and vice versa. */
3750 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3751 && INTEGRAL_TYPE_P (rhs1_type
))
3752 || (INTEGRAL_TYPE_P (lhs_type
)
3753 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3756 /* Otherwise assert we are converting between types of the
3758 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3760 error ("invalid types in nop conversion");
3761 debug_generic_expr (lhs_type
);
3762 debug_generic_expr (rhs1_type
);
3769 case ADDR_SPACE_CONVERT_EXPR
:
3771 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3772 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3773 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3775 error ("invalid types in address space conversion");
3776 debug_generic_expr (lhs_type
);
3777 debug_generic_expr (rhs1_type
);
3784 case FIXED_CONVERT_EXPR
:
3786 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3787 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3789 error ("invalid types in fixed-point conversion");
3790 debug_generic_expr (lhs_type
);
3791 debug_generic_expr (rhs1_type
);
3800 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3801 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3802 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3804 error ("invalid types in conversion to floating-point");
3805 debug_generic_expr (lhs_type
);
3806 debug_generic_expr (rhs1_type
);
3813 case FIX_TRUNC_EXPR
:
3815 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3816 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3817 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3819 error ("invalid types in conversion to integer");
3820 debug_generic_expr (lhs_type
);
3821 debug_generic_expr (rhs1_type
);
3828 case VEC_UNPACK_HI_EXPR
:
3829 case VEC_UNPACK_LO_EXPR
:
3830 case VEC_UNPACK_FLOAT_HI_EXPR
:
3831 case VEC_UNPACK_FLOAT_LO_EXPR
:
3832 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
3833 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
3834 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3835 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3836 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3837 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type
)))
3838 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3839 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
)))
3840 || ((rhs_code
== VEC_UNPACK_HI_EXPR
3841 || rhs_code
== VEC_UNPACK_LO_EXPR
)
3842 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3843 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3844 || ((rhs_code
== VEC_UNPACK_FLOAT_HI_EXPR
3845 || rhs_code
== VEC_UNPACK_FLOAT_LO_EXPR
)
3846 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3847 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
))))
3848 || ((rhs_code
== VEC_UNPACK_FIX_TRUNC_HI_EXPR
3849 || rhs_code
== VEC_UNPACK_FIX_TRUNC_LO_EXPR
)
3850 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3851 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type
))))
3852 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type
)),
3853 2 * GET_MODE_SIZE (element_mode (rhs1_type
)))
3854 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type
)
3855 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type
)))
3856 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type
),
3857 TYPE_VECTOR_SUBPARTS (rhs1_type
)))
3859 error ("type mismatch in %qs expression", code_name
);
3860 debug_generic_expr (lhs_type
);
3861 debug_generic_expr (rhs1_type
);
3872 /* Disallow pointer and offset types for many of the unary gimple. */
3873 if (POINTER_TYPE_P (lhs_type
)
3874 || TREE_CODE (lhs_type
) == OFFSET_TYPE
)
3876 error ("invalid types for %qs", code_name
);
3877 debug_generic_expr (lhs_type
);
3878 debug_generic_expr (rhs1_type
);
3884 if (!ANY_INTEGRAL_TYPE_P (lhs_type
)
3885 || !TYPE_UNSIGNED (lhs_type
)
3886 || !ANY_INTEGRAL_TYPE_P (rhs1_type
)
3887 || TYPE_UNSIGNED (rhs1_type
)
3888 || element_precision (lhs_type
) != element_precision (rhs1_type
))
3890 error ("invalid types for %qs", code_name
);
3891 debug_generic_expr (lhs_type
);
3892 debug_generic_expr (rhs1_type
);
3897 case VEC_DUPLICATE_EXPR
:
3898 if (TREE_CODE (lhs_type
) != VECTOR_TYPE
3899 || !useless_type_conversion_p (TREE_TYPE (lhs_type
), rhs1_type
))
3901 error ("%qs should be from a scalar to a like vector", code_name
);
3902 debug_generic_expr (lhs_type
);
3903 debug_generic_expr (rhs1_type
);
3912 /* For the remaining codes assert there is no conversion involved. */
3913 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3915 error ("non-trivial conversion in unary operation");
3916 debug_generic_expr (lhs_type
);
3917 debug_generic_expr (rhs1_type
);
3924 /* Verify a gimple assignment statement STMT with a binary rhs.
3925 Returns true if anything is wrong. */
3928 verify_gimple_assign_binary (gassign
*stmt
)
3930 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3931 tree lhs
= gimple_assign_lhs (stmt
);
3932 tree lhs_type
= TREE_TYPE (lhs
);
3933 tree rhs1
= gimple_assign_rhs1 (stmt
);
3934 tree rhs1_type
= TREE_TYPE (rhs1
);
3935 tree rhs2
= gimple_assign_rhs2 (stmt
);
3936 tree rhs2_type
= TREE_TYPE (rhs2
);
3938 if (!is_gimple_reg (lhs
))
3940 error ("non-register as LHS of binary operation");
3944 if (!is_gimple_val (rhs1
)
3945 || !is_gimple_val (rhs2
))
3947 error ("invalid operands in binary operation");
3951 const char* const code_name
= get_tree_code_name (rhs_code
);
3953 /* First handle operations that involve different types. */
3958 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3959 || !(INTEGRAL_TYPE_P (rhs1_type
)
3960 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3961 || !(INTEGRAL_TYPE_P (rhs2_type
)
3962 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3964 error ("type mismatch in %qs", code_name
);
3965 debug_generic_expr (lhs_type
);
3966 debug_generic_expr (rhs1_type
);
3967 debug_generic_expr (rhs2_type
);
3979 /* Shifts and rotates are ok on integral types, fixed point
3980 types and integer vector types. */
3981 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3982 && !FIXED_POINT_TYPE_P (rhs1_type
)
3983 && ! (VECTOR_TYPE_P (rhs1_type
)
3984 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3985 || (!INTEGRAL_TYPE_P (rhs2_type
)
3986 /* Vector shifts of vectors are also ok. */
3987 && ! (VECTOR_TYPE_P (rhs1_type
)
3988 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3989 && VECTOR_TYPE_P (rhs2_type
)
3990 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3991 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3993 error ("type mismatch in %qs", code_name
);
3994 debug_generic_expr (lhs_type
);
3995 debug_generic_expr (rhs1_type
);
3996 debug_generic_expr (rhs2_type
);
4003 case WIDEN_LSHIFT_EXPR
:
4005 if (!INTEGRAL_TYPE_P (lhs_type
)
4006 || !INTEGRAL_TYPE_P (rhs1_type
)
4007 || TREE_CODE (rhs2
) != INTEGER_CST
4008 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
4010 error ("type mismatch in %qs", code_name
);
4011 debug_generic_expr (lhs_type
);
4012 debug_generic_expr (rhs1_type
);
4013 debug_generic_expr (rhs2_type
);
4020 case VEC_WIDEN_LSHIFT_HI_EXPR
:
4021 case VEC_WIDEN_LSHIFT_LO_EXPR
:
4023 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4024 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4025 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
4026 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
4027 || TREE_CODE (rhs2
) != INTEGER_CST
4028 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
4029 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
4031 error ("type mismatch in %qs", code_name
);
4032 debug_generic_expr (lhs_type
);
4033 debug_generic_expr (rhs1_type
);
4034 debug_generic_expr (rhs2_type
);
4044 tree lhs_etype
= lhs_type
;
4045 tree rhs1_etype
= rhs1_type
;
4046 tree rhs2_etype
= rhs2_type
;
4047 if (VECTOR_TYPE_P (lhs_type
))
4049 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4050 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
4052 error ("invalid non-vector operands to %qs", code_name
);
4055 lhs_etype
= TREE_TYPE (lhs_type
);
4056 rhs1_etype
= TREE_TYPE (rhs1_type
);
4057 rhs2_etype
= TREE_TYPE (rhs2_type
);
4059 if (POINTER_TYPE_P (lhs_etype
)
4060 || POINTER_TYPE_P (rhs1_etype
)
4061 || POINTER_TYPE_P (rhs2_etype
))
4063 error ("invalid (pointer) operands %qs", code_name
);
4067 /* Continue with generic binary expression handling. */
4071 case POINTER_PLUS_EXPR
:
4073 if (!POINTER_TYPE_P (rhs1_type
)
4074 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
4075 || !ptrofftype_p (rhs2_type
))
4077 error ("type mismatch in %qs", code_name
);
4078 debug_generic_stmt (lhs_type
);
4079 debug_generic_stmt (rhs1_type
);
4080 debug_generic_stmt (rhs2_type
);
4087 case POINTER_DIFF_EXPR
:
4089 if (!POINTER_TYPE_P (rhs1_type
)
4090 || !POINTER_TYPE_P (rhs2_type
)
4091 /* Because we special-case pointers to void we allow difference
4092 of arbitrary pointers with the same mode. */
4093 || TYPE_MODE (rhs1_type
) != TYPE_MODE (rhs2_type
)
4094 || !INTEGRAL_TYPE_P (lhs_type
)
4095 || TYPE_UNSIGNED (lhs_type
)
4096 || TYPE_PRECISION (lhs_type
) != TYPE_PRECISION (rhs1_type
))
4098 error ("type mismatch in %qs", code_name
);
4099 debug_generic_stmt (lhs_type
);
4100 debug_generic_stmt (rhs1_type
);
4101 debug_generic_stmt (rhs2_type
);
4108 case TRUTH_ANDIF_EXPR
:
4109 case TRUTH_ORIF_EXPR
:
4110 case TRUTH_AND_EXPR
:
4112 case TRUTH_XOR_EXPR
:
4122 case UNORDERED_EXPR
:
4130 /* Comparisons are also binary, but the result type is not
4131 connected to the operand types. */
4132 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
, rhs_code
);
4134 case WIDEN_MULT_EXPR
:
4135 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
4137 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
4138 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
4140 case WIDEN_SUM_EXPR
:
4142 if (((TREE_CODE (rhs1_type
) != VECTOR_TYPE
4143 || TREE_CODE (lhs_type
) != VECTOR_TYPE
)
4144 && ((!INTEGRAL_TYPE_P (rhs1_type
)
4145 && !SCALAR_FLOAT_TYPE_P (rhs1_type
))
4146 || (!INTEGRAL_TYPE_P (lhs_type
)
4147 && !SCALAR_FLOAT_TYPE_P (lhs_type
))))
4148 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
4149 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type
)),
4150 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4152 error ("type mismatch in %qs", code_name
);
4153 debug_generic_expr (lhs_type
);
4154 debug_generic_expr (rhs1_type
);
4155 debug_generic_expr (rhs2_type
);
4161 case VEC_WIDEN_MULT_HI_EXPR
:
4162 case VEC_WIDEN_MULT_LO_EXPR
:
4163 case VEC_WIDEN_MULT_EVEN_EXPR
:
4164 case VEC_WIDEN_MULT_ODD_EXPR
:
4166 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4167 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4168 || !types_compatible_p (rhs1_type
, rhs2_type
)
4169 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type
)),
4170 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4172 error ("type mismatch in %qs", code_name
);
4173 debug_generic_expr (lhs_type
);
4174 debug_generic_expr (rhs1_type
);
4175 debug_generic_expr (rhs2_type
);
4181 case VEC_PACK_TRUNC_EXPR
:
4182 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4183 vector boolean types. */
4184 if (VECTOR_BOOLEAN_TYPE_P (lhs_type
)
4185 && VECTOR_BOOLEAN_TYPE_P (rhs1_type
)
4186 && types_compatible_p (rhs1_type
, rhs2_type
)
4187 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type
),
4188 2 * TYPE_VECTOR_SUBPARTS (rhs1_type
)))
4192 case VEC_PACK_SAT_EXPR
:
4193 case VEC_PACK_FIX_TRUNC_EXPR
:
4195 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4196 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4197 || !((rhs_code
== VEC_PACK_FIX_TRUNC_EXPR
4198 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
))
4199 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
)))
4200 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
4201 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))))
4202 || !types_compatible_p (rhs1_type
, rhs2_type
)
4203 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type
)),
4204 2 * GET_MODE_SIZE (element_mode (lhs_type
)))
4205 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type
),
4206 TYPE_VECTOR_SUBPARTS (lhs_type
)))
4208 error ("type mismatch in %qs", code_name
);
4209 debug_generic_expr (lhs_type
);
4210 debug_generic_expr (rhs1_type
);
4211 debug_generic_expr (rhs2_type
);
4218 case VEC_PACK_FLOAT_EXPR
:
4219 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4220 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4221 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
4222 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type
))
4223 || !types_compatible_p (rhs1_type
, rhs2_type
)
4224 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type
)),
4225 2 * GET_MODE_SIZE (element_mode (lhs_type
)))
4226 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type
),
4227 TYPE_VECTOR_SUBPARTS (lhs_type
)))
4229 error ("type mismatch in %qs", code_name
);
4230 debug_generic_expr (lhs_type
);
4231 debug_generic_expr (rhs1_type
);
4232 debug_generic_expr (rhs2_type
);
4239 case MULT_HIGHPART_EXPR
:
4240 case TRUNC_DIV_EXPR
:
4242 case FLOOR_DIV_EXPR
:
4243 case ROUND_DIV_EXPR
:
4244 case TRUNC_MOD_EXPR
:
4246 case FLOOR_MOD_EXPR
:
4247 case ROUND_MOD_EXPR
:
4249 case EXACT_DIV_EXPR
:
4252 /* Disallow pointer and offset types for many of the binary gimple. */
4253 if (POINTER_TYPE_P (lhs_type
)
4254 || TREE_CODE (lhs_type
) == OFFSET_TYPE
)
4256 error ("invalid types for %qs", code_name
);
4257 debug_generic_expr (lhs_type
);
4258 debug_generic_expr (rhs1_type
);
4259 debug_generic_expr (rhs2_type
);
4262 /* Continue with generic binary expression handling. */
4267 /* Continue with generic binary expression handling. */
4271 if (POINTER_TYPE_P (lhs_type
)
4272 && TREE_CODE (rhs2
) == INTEGER_CST
)
4274 /* Disallow pointer and offset types for many of the binary gimple. */
4275 if (POINTER_TYPE_P (lhs_type
)
4276 || TREE_CODE (lhs_type
) == OFFSET_TYPE
)
4278 error ("invalid types for %qs", code_name
);
4279 debug_generic_expr (lhs_type
);
4280 debug_generic_expr (rhs1_type
);
4281 debug_generic_expr (rhs2_type
);
4284 /* Continue with generic binary expression handling. */
4287 case VEC_SERIES_EXPR
:
4288 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
))
4290 error ("type mismatch in %qs", code_name
);
4291 debug_generic_expr (rhs1_type
);
4292 debug_generic_expr (rhs2_type
);
4295 if (TREE_CODE (lhs_type
) != VECTOR_TYPE
4296 || !useless_type_conversion_p (TREE_TYPE (lhs_type
), rhs1_type
))
4298 error ("vector type expected in %qs", code_name
);
4299 debug_generic_expr (lhs_type
);
4308 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4309 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4311 error ("type mismatch in binary expression");
4312 debug_generic_stmt (lhs_type
);
4313 debug_generic_stmt (rhs1_type
);
4314 debug_generic_stmt (rhs2_type
);
4321 /* Verify a gimple assignment statement STMT with a ternary rhs.
4322 Returns true if anything is wrong. */
4325 verify_gimple_assign_ternary (gassign
*stmt
)
4327 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4328 tree lhs
= gimple_assign_lhs (stmt
);
4329 tree lhs_type
= TREE_TYPE (lhs
);
4330 tree rhs1
= gimple_assign_rhs1 (stmt
);
4331 tree rhs1_type
= TREE_TYPE (rhs1
);
4332 tree rhs2
= gimple_assign_rhs2 (stmt
);
4333 tree rhs2_type
= TREE_TYPE (rhs2
);
4334 tree rhs3
= gimple_assign_rhs3 (stmt
);
4335 tree rhs3_type
= TREE_TYPE (rhs3
);
4337 if (!is_gimple_reg (lhs
))
4339 error ("non-register as LHS of ternary operation");
4343 if (!is_gimple_val (rhs1
)
4344 || !is_gimple_val (rhs2
)
4345 || !is_gimple_val (rhs3
))
4347 error ("invalid operands in ternary operation");
4351 const char* const code_name
= get_tree_code_name (rhs_code
);
4353 /* First handle operations that involve different types. */
4356 case WIDEN_MULT_PLUS_EXPR
:
4357 case WIDEN_MULT_MINUS_EXPR
:
4358 if ((!INTEGRAL_TYPE_P (rhs1_type
)
4359 && !FIXED_POINT_TYPE_P (rhs1_type
))
4360 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
4361 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4362 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
4363 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
4365 error ("type mismatch in %qs", code_name
);
4366 debug_generic_expr (lhs_type
);
4367 debug_generic_expr (rhs1_type
);
4368 debug_generic_expr (rhs2_type
);
4369 debug_generic_expr (rhs3_type
);
4375 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type
)
4376 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type
),
4377 TYPE_VECTOR_SUBPARTS (lhs_type
)))
4379 error ("the first argument of a %qs must be of a "
4380 "boolean vector type of the same number of elements "
4381 "as the result", code_name
);
4382 debug_generic_expr (lhs_type
);
4383 debug_generic_expr (rhs1_type
);
4388 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
4389 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4391 error ("type mismatch in %qs", code_name
);
4392 debug_generic_expr (lhs_type
);
4393 debug_generic_expr (rhs2_type
);
4394 debug_generic_expr (rhs3_type
);
4400 /* If permute is constant, then we allow for lhs and rhs
4401 to have different vector types, provided:
4402 (1) lhs, rhs1, rhs2 have same element type.
4403 (2) rhs3 vector is constant and has integer element type.
4404 (3) len(lhs) == len(rhs3) && len(rhs1) == len(rhs2). */
4406 if (TREE_CODE (lhs_type
) != VECTOR_TYPE
4407 || TREE_CODE (rhs1_type
) != VECTOR_TYPE
4408 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4409 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4411 error ("vector types expected in %qs", code_name
);
4412 debug_generic_expr (lhs_type
);
4413 debug_generic_expr (rhs1_type
);
4414 debug_generic_expr (rhs2_type
);
4415 debug_generic_expr (rhs3_type
);
4419 /* If rhs3 is constant, we allow lhs, rhs1 and rhs2 to be different vector types,
4420 as long as lhs, rhs1 and rhs2 have same element type. */
4421 if (TREE_CONSTANT (rhs3
)
4422 ? (!useless_type_conversion_p (TREE_TYPE (lhs_type
), TREE_TYPE (rhs1_type
))
4423 || !useless_type_conversion_p (TREE_TYPE (lhs_type
), TREE_TYPE (rhs2_type
)))
4424 : (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4425 || !useless_type_conversion_p (lhs_type
, rhs2_type
)))
4427 error ("type mismatch in %qs", code_name
);
4428 debug_generic_expr (lhs_type
);
4429 debug_generic_expr (rhs1_type
);
4430 debug_generic_expr (rhs2_type
);
4431 debug_generic_expr (rhs3_type
);
4435 /* If rhs3 is constant, relax the check len(rhs2) == len(rhs3). */
4436 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type
),
4437 TYPE_VECTOR_SUBPARTS (rhs2_type
))
4438 || (!TREE_CONSTANT(rhs3
)
4439 && maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type
),
4440 TYPE_VECTOR_SUBPARTS (rhs3_type
)))
4441 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type
),
4442 TYPE_VECTOR_SUBPARTS (lhs_type
)))
4444 error ("vectors with different element number found in %qs",
4446 debug_generic_expr (lhs_type
);
4447 debug_generic_expr (rhs1_type
);
4448 debug_generic_expr (rhs2_type
);
4449 debug_generic_expr (rhs3_type
);
4453 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
4454 || (TREE_CODE (rhs3
) != VECTOR_CST
4455 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4456 (TREE_TYPE (rhs3_type
)))
4457 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4458 (TREE_TYPE (rhs1_type
))))))
4460 error ("invalid mask type in %qs", code_name
);
4461 debug_generic_expr (lhs_type
);
4462 debug_generic_expr (rhs1_type
);
4463 debug_generic_expr (rhs2_type
);
4464 debug_generic_expr (rhs3_type
);
4471 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
)
4472 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4473 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
)))
4474 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type
))))
4476 error ("type mismatch in %qs", code_name
);
4477 debug_generic_expr (lhs_type
);
4478 debug_generic_expr (rhs1_type
);
4479 debug_generic_expr (rhs2_type
);
4480 debug_generic_expr (rhs3_type
);
4484 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4485 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4486 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4488 error ("vector types expected in %qs", code_name
);
4489 debug_generic_expr (lhs_type
);
4490 debug_generic_expr (rhs1_type
);
4491 debug_generic_expr (rhs2_type
);
4492 debug_generic_expr (rhs3_type
);
4498 case BIT_INSERT_EXPR
:
4499 if (! useless_type_conversion_p (lhs_type
, rhs1_type
))
4501 error ("type mismatch in %qs", code_name
);
4502 debug_generic_expr (lhs_type
);
4503 debug_generic_expr (rhs1_type
);
4506 if (! ((INTEGRAL_TYPE_P (rhs1_type
)
4507 && INTEGRAL_TYPE_P (rhs2_type
))
4508 /* Vector element insert. */
4509 || (VECTOR_TYPE_P (rhs1_type
)
4510 && types_compatible_p (TREE_TYPE (rhs1_type
), rhs2_type
))
4511 /* Aligned sub-vector insert. */
4512 || (VECTOR_TYPE_P (rhs1_type
)
4513 && VECTOR_TYPE_P (rhs2_type
)
4514 && types_compatible_p (TREE_TYPE (rhs1_type
),
4515 TREE_TYPE (rhs2_type
))
4516 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type
),
4517 TYPE_VECTOR_SUBPARTS (rhs2_type
))
4518 && multiple_p (wi::to_poly_offset (rhs3
),
4519 wi::to_poly_offset (TYPE_SIZE (rhs2_type
))))))
4521 error ("not allowed type combination in %qs", code_name
);
4522 debug_generic_expr (rhs1_type
);
4523 debug_generic_expr (rhs2_type
);
4526 if (! tree_fits_uhwi_p (rhs3
)
4527 || ! types_compatible_p (bitsizetype
, TREE_TYPE (rhs3
))
4528 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type
)))
4530 error ("invalid position or size in %qs", code_name
);
4533 if (INTEGRAL_TYPE_P (rhs1_type
)
4534 && !type_has_mode_precision_p (rhs1_type
))
4536 error ("%qs into non-mode-precision operand", code_name
);
4539 if (INTEGRAL_TYPE_P (rhs1_type
))
4541 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4542 if (bitpos
>= TYPE_PRECISION (rhs1_type
)
4543 || (bitpos
+ TYPE_PRECISION (rhs2_type
)
4544 > TYPE_PRECISION (rhs1_type
)))
4546 error ("insertion out of range in %qs", code_name
);
4550 else if (VECTOR_TYPE_P (rhs1_type
))
4552 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4553 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (TYPE_SIZE (rhs2_type
));
4554 if (bitpos
% bitsize
!= 0)
4556 error ("%qs not at element boundary", code_name
);
4564 if (((TREE_CODE (rhs1_type
) != VECTOR_TYPE
4565 || TREE_CODE (lhs_type
) != VECTOR_TYPE
)
4566 && ((!INTEGRAL_TYPE_P (rhs1_type
)
4567 && !SCALAR_FLOAT_TYPE_P (rhs1_type
))
4568 || (!INTEGRAL_TYPE_P (lhs_type
)
4569 && !SCALAR_FLOAT_TYPE_P (lhs_type
))))
4570 /* rhs1_type and rhs2_type may differ in sign. */
4571 || !tree_nop_conversion_p (rhs1_type
, rhs2_type
)
4572 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4573 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type
)),
4574 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4576 error ("type mismatch in %qs", code_name
);
4577 debug_generic_expr (lhs_type
);
4578 debug_generic_expr (rhs1_type
);
4579 debug_generic_expr (rhs2_type
);
4585 case REALIGN_LOAD_EXPR
:
4595 /* Verify a gimple assignment statement STMT with a single rhs.
4596 Returns true if anything is wrong. */
4599 verify_gimple_assign_single (gassign
*stmt
)
4601 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4602 tree lhs
= gimple_assign_lhs (stmt
);
4603 tree lhs_type
= TREE_TYPE (lhs
);
4604 tree rhs1
= gimple_assign_rhs1 (stmt
);
4605 tree rhs1_type
= TREE_TYPE (rhs1
);
4608 const char* const code_name
= get_tree_code_name (rhs_code
);
4610 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4612 error ("non-trivial conversion in %qs", code_name
);
4613 debug_generic_expr (lhs_type
);
4614 debug_generic_expr (rhs1_type
);
4618 if (gimple_clobber_p (stmt
)
4619 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4621 error ("%qs LHS in clobber statement",
4622 get_tree_code_name (TREE_CODE (lhs
)));
4623 debug_generic_expr (lhs
);
4627 if (TREE_CODE (lhs
) == WITH_SIZE_EXPR
)
4629 error ("%qs LHS in assignment statement",
4630 get_tree_code_name (TREE_CODE (lhs
)));
4631 debug_generic_expr (lhs
);
4635 if (handled_component_p (lhs
)
4636 || TREE_CODE (lhs
) == MEM_REF
4637 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4638 res
|= verify_types_in_gimple_reference (lhs
, true);
4640 /* Special codes we cannot handle via their class. */
4645 tree op
= TREE_OPERAND (rhs1
, 0);
4646 if (!is_gimple_addressable (op
))
4648 error ("invalid operand in %qs", code_name
);
4652 /* Technically there is no longer a need for matching types, but
4653 gimple hygiene asks for this check. In LTO we can end up
4654 combining incompatible units and thus end up with addresses
4655 of globals that change their type to a common one. */
4657 && !types_compatible_p (TREE_TYPE (op
),
4658 TREE_TYPE (TREE_TYPE (rhs1
)))
4659 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4662 error ("type mismatch in %qs", code_name
);
4663 debug_generic_stmt (TREE_TYPE (rhs1
));
4664 debug_generic_stmt (TREE_TYPE (op
));
4668 return (verify_address (rhs1
, true)
4669 || verify_types_in_gimple_reference (op
, true));
4674 error ("%qs in gimple IL", code_name
);
4677 case WITH_SIZE_EXPR
:
4678 if (!is_gimple_val (TREE_OPERAND (rhs1
, 1)))
4680 error ("invalid %qs size argument in load", code_name
);
4681 debug_generic_stmt (lhs
);
4682 debug_generic_stmt (rhs1
);
4685 rhs1
= TREE_OPERAND (rhs1
, 0);
4690 case ARRAY_RANGE_REF
:
4691 case VIEW_CONVERT_EXPR
:
4694 case TARGET_MEM_REF
:
4696 if (!is_gimple_reg (lhs
)
4697 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4699 error ("invalid RHS for gimple memory store: %qs", code_name
);
4700 debug_generic_stmt (lhs
);
4701 debug_generic_stmt (rhs1
);
4704 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4716 /* tcc_declaration */
4721 if (!is_gimple_reg (lhs
)
4722 && !is_gimple_reg (rhs1
)
4723 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4725 error ("invalid RHS for gimple memory store: %qs", code_name
);
4726 debug_generic_stmt (lhs
);
4727 debug_generic_stmt (rhs1
);
4733 if (VECTOR_TYPE_P (rhs1_type
))
4736 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4738 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4740 /* For vector CONSTRUCTORs we require that either it is empty
4741 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4742 (then the element count must be correct to cover the whole
4743 outer vector and index must be NULL on all elements, or it is
4744 a CONSTRUCTOR of scalar elements, where we as an exception allow
4745 smaller number of elements (assuming zero filling) and
4746 consecutive indexes as compared to NULL indexes (such
4747 CONSTRUCTORs can appear in the IL from FEs). */
4748 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4750 if (elt_t
== NULL_TREE
)
4752 elt_t
= TREE_TYPE (elt_v
);
4753 if (VECTOR_TYPE_P (elt_t
))
4755 tree elt_t
= TREE_TYPE (elt_v
);
4756 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4759 error ("incorrect type of vector %qs elements",
4761 debug_generic_stmt (rhs1
);
4764 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1
)
4765 * TYPE_VECTOR_SUBPARTS (elt_t
),
4766 TYPE_VECTOR_SUBPARTS (rhs1_type
)))
4768 error ("incorrect number of vector %qs elements",
4770 debug_generic_stmt (rhs1
);
4774 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4777 error ("incorrect type of vector %qs elements",
4779 debug_generic_stmt (rhs1
);
4782 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1
),
4783 TYPE_VECTOR_SUBPARTS (rhs1_type
)))
4785 error ("incorrect number of vector %qs elements",
4787 debug_generic_stmt (rhs1
);
4791 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4793 error ("incorrect type of vector CONSTRUCTOR elements");
4794 debug_generic_stmt (rhs1
);
4797 if (elt_i
!= NULL_TREE
4798 && (VECTOR_TYPE_P (elt_t
)
4799 || TREE_CODE (elt_i
) != INTEGER_CST
4800 || compare_tree_int (elt_i
, i
) != 0))
4802 error ("vector %qs with non-NULL element index",
4804 debug_generic_stmt (rhs1
);
4807 if (!is_gimple_val (elt_v
))
4809 error ("vector %qs element is not a GIMPLE value",
4811 debug_generic_stmt (rhs1
);
4816 else if (CONSTRUCTOR_NELTS (rhs1
) != 0)
4818 error ("non-vector %qs with elements", code_name
);
4819 debug_generic_stmt (rhs1
);
4834 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4835 is a problem, otherwise false. */
4838 verify_gimple_assign (gassign
*stmt
)
4840 if (gimple_assign_nontemporal_move_p (stmt
))
4842 tree lhs
= gimple_assign_lhs (stmt
);
4843 if (is_gimple_reg (lhs
))
4845 error ("nontemporal store lhs cannot be a gimple register");
4846 debug_generic_stmt (lhs
);
4851 switch (gimple_assign_rhs_class (stmt
))
4853 case GIMPLE_SINGLE_RHS
:
4854 return verify_gimple_assign_single (stmt
);
4856 case GIMPLE_UNARY_RHS
:
4857 return verify_gimple_assign_unary (stmt
);
4859 case GIMPLE_BINARY_RHS
:
4860 return verify_gimple_assign_binary (stmt
);
4862 case GIMPLE_TERNARY_RHS
:
4863 return verify_gimple_assign_ternary (stmt
);
4870 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4871 is a problem, otherwise false. */
4874 verify_gimple_return (greturn
*stmt
)
4876 tree op
= gimple_return_retval (stmt
);
4877 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4879 /* We cannot test for present return values as we do not fix up missing
4880 return values from the original source. */
4884 if (!is_gimple_val (op
)
4885 && TREE_CODE (op
) != RESULT_DECL
)
4887 error ("invalid operand in return statement");
4888 debug_generic_stmt (op
);
4892 if ((TREE_CODE (op
) == RESULT_DECL
4893 && DECL_BY_REFERENCE (op
))
4894 || (TREE_CODE (op
) == SSA_NAME
4895 && SSA_NAME_VAR (op
)
4896 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4897 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4898 op
= TREE_TYPE (op
);
4900 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4902 error ("invalid conversion in return statement");
4903 debug_generic_stmt (restype
);
4904 debug_generic_stmt (TREE_TYPE (op
));
4912 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4913 is a problem, otherwise false. */
4916 verify_gimple_goto (ggoto
*stmt
)
4918 tree dest
= gimple_goto_dest (stmt
);
4920 /* ??? We have two canonical forms of direct goto destinations, a
4921 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4922 if (TREE_CODE (dest
) != LABEL_DECL
4923 && (!is_gimple_val (dest
)
4924 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4926 error ("goto destination is neither a label nor a pointer");
4933 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4934 is a problem, otherwise false. */
4937 verify_gimple_switch (gswitch
*stmt
)
4940 tree elt
, prev_upper_bound
= NULL_TREE
;
4941 tree index_type
, elt_type
= NULL_TREE
;
4943 if (!is_gimple_val (gimple_switch_index (stmt
)))
4945 error ("invalid operand to switch statement");
4946 debug_generic_stmt (gimple_switch_index (stmt
));
4950 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4951 if (! INTEGRAL_TYPE_P (index_type
))
4953 error ("non-integral type switch statement");
4954 debug_generic_expr (index_type
);
4958 elt
= gimple_switch_label (stmt
, 0);
4959 if (CASE_LOW (elt
) != NULL_TREE
4960 || CASE_HIGH (elt
) != NULL_TREE
4961 || CASE_CHAIN (elt
) != NULL_TREE
)
4963 error ("invalid default case label in switch statement");
4964 debug_generic_expr (elt
);
4968 n
= gimple_switch_num_labels (stmt
);
4969 for (i
= 1; i
< n
; i
++)
4971 elt
= gimple_switch_label (stmt
, i
);
4973 if (CASE_CHAIN (elt
))
4975 error ("invalid %<CASE_CHAIN%>");
4976 debug_generic_expr (elt
);
4979 if (! CASE_LOW (elt
))
4981 error ("invalid case label in switch statement");
4982 debug_generic_expr (elt
);
4986 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4988 error ("invalid case range in switch statement");
4989 debug_generic_expr (elt
);
4995 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4996 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4998 error ("type precision mismatch in switch statement");
5002 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
5003 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
5005 error ("type mismatch for case label in switch statement");
5006 debug_generic_expr (elt
);
5010 if (prev_upper_bound
)
5012 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
5014 error ("case labels not sorted in switch statement");
5019 prev_upper_bound
= CASE_HIGH (elt
);
5020 if (! prev_upper_bound
)
5021 prev_upper_bound
= CASE_LOW (elt
);
5027 /* Verify a gimple debug statement STMT.
5028 Returns true if anything is wrong. */
5031 verify_gimple_debug (gimple
*stmt ATTRIBUTE_UNUSED
)
5033 /* There isn't much that could be wrong in a gimple debug stmt. A
5034 gimple debug bind stmt, for example, maps a tree, that's usually
5035 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
5036 component or member of an aggregate type, to another tree, that
5037 can be an arbitrary expression. These stmts expand into debug
5038 insns, and are converted to debug notes by var-tracking.cc. */
5042 /* Verify a gimple label statement STMT.
5043 Returns true if anything is wrong. */
5046 verify_gimple_label (glabel
*stmt
)
5048 tree decl
= gimple_label_label (stmt
);
5052 if (TREE_CODE (decl
) != LABEL_DECL
)
5054 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
5055 && DECL_CONTEXT (decl
) != current_function_decl
)
5057 error ("label context is not the current function declaration");
5061 uid
= LABEL_DECL_UID (decl
);
5064 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
5066 error ("incorrect entry in %<label_to_block_map%>");
5070 uid
= EH_LANDING_PAD_NR (decl
);
5073 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
5074 if (decl
!= lp
->post_landing_pad
)
5076 error ("incorrect setting of landing pad number");
5084 /* Verify a gimple cond statement STMT.
5085 Returns true if anything is wrong. */
5088 verify_gimple_cond (gcond
*stmt
)
5090 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
5092 error ("invalid comparison code in gimple cond");
5095 if (!(!gimple_cond_true_label (stmt
)
5096 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
5097 || !(!gimple_cond_false_label (stmt
)
5098 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
5100 error ("invalid labels in gimple cond");
5104 return verify_gimple_comparison (boolean_type_node
,
5105 gimple_cond_lhs (stmt
),
5106 gimple_cond_rhs (stmt
),
5107 gimple_cond_code (stmt
));
5110 /* Verify the GIMPLE statement STMT. Returns true if there is an
5111 error, otherwise false. */
5114 verify_gimple_stmt (gimple
*stmt
)
5116 switch (gimple_code (stmt
))
5119 return verify_gimple_assign (as_a
<gassign
*> (stmt
));
5122 return verify_gimple_label (as_a
<glabel
*> (stmt
));
5125 return verify_gimple_call (as_a
<gcall
*> (stmt
));
5128 return verify_gimple_cond (as_a
<gcond
*> (stmt
));
5131 return verify_gimple_goto (as_a
<ggoto
*> (stmt
));
5134 return verify_gimple_switch (as_a
<gswitch
*> (stmt
));
5137 return verify_gimple_return (as_a
<greturn
*> (stmt
));
5142 case GIMPLE_TRANSACTION
:
5143 return verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
5145 /* Tuples that do not have tree operands. */
5147 case GIMPLE_PREDICT
:
5149 case GIMPLE_EH_DISPATCH
:
5150 case GIMPLE_EH_MUST_NOT_THROW
:
5154 /* OpenMP directives are validated by the FE and never operated
5155 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
5156 non-gimple expressions when the main index variable has had
5157 its address taken. This does not affect the loop itself
5158 because the header of an GIMPLE_OMP_FOR is merely used to determine
5159 how to setup the parallel iteration. */
5166 return verify_gimple_debug (stmt
);
5173 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
5174 and false otherwise. */
5177 verify_gimple_phi (gphi
*phi
)
5181 tree phi_result
= gimple_phi_result (phi
);
5186 error ("invalid %<PHI%> result");
5190 virtual_p
= virtual_operand_p (phi_result
);
5191 if (TREE_CODE (phi_result
) != SSA_NAME
5193 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
5195 error ("invalid %<PHI%> result");
5199 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5201 tree t
= gimple_phi_arg_def (phi
, i
);
5205 error ("missing %<PHI%> def");
5209 /* Addressable variables do have SSA_NAMEs but they
5210 are not considered gimple values. */
5211 else if ((TREE_CODE (t
) == SSA_NAME
5212 && virtual_p
!= virtual_operand_p (t
))
5214 && (TREE_CODE (t
) != SSA_NAME
5215 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
5217 && !is_gimple_val (t
)))
5219 error ("invalid %<PHI%> argument");
5220 debug_generic_expr (t
);
5223 #ifdef ENABLE_TYPES_CHECKING
5224 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
5226 error ("incompatible types in %<PHI%> argument %u", i
);
5227 debug_generic_stmt (TREE_TYPE (phi_result
));
5228 debug_generic_stmt (TREE_TYPE (t
));
5237 /* Verify the GIMPLE statements inside the sequence STMTS. */
5240 verify_gimple_in_seq_2 (gimple_seq stmts
)
5242 gimple_stmt_iterator ittr
;
5245 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
5247 gimple
*stmt
= gsi_stmt (ittr
);
5249 switch (gimple_code (stmt
))
5252 err
|= verify_gimple_in_seq_2 (
5253 gimple_bind_body (as_a
<gbind
*> (stmt
)));
5257 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
5258 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
5261 case GIMPLE_EH_FILTER
:
5262 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
5265 case GIMPLE_EH_ELSE
:
5267 geh_else
*eh_else
= as_a
<geh_else
*> (stmt
);
5268 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else
));
5269 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else
));
5274 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (
5275 as_a
<gcatch
*> (stmt
)));
5279 err
|= verify_gimple_in_seq_2 (gimple_assume_body (stmt
));
5282 case GIMPLE_TRANSACTION
:
5283 err
|= verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
5288 bool err2
= verify_gimple_stmt (stmt
);
5290 debug_gimple_stmt (stmt
);
5299 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5300 is a problem, otherwise false. */
5303 verify_gimple_transaction (gtransaction
*stmt
)
5307 lab
= gimple_transaction_label_norm (stmt
);
5308 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5310 lab
= gimple_transaction_label_uninst (stmt
);
5311 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5313 lab
= gimple_transaction_label_over (stmt
);
5314 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5317 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
5321 /* Verify the GIMPLE statements inside the statement list STMTS. */
5324 verify_gimple_in_seq (gimple_seq stmts
, bool ice
)
5326 timevar_push (TV_TREE_STMT_VERIFY
);
5327 bool res
= verify_gimple_in_seq_2 (stmts
);
5329 internal_error ("%<verify_gimple%> failed");
5330 timevar_pop (TV_TREE_STMT_VERIFY
);
5334 /* Return true when the T can be shared. */
5337 tree_node_can_be_shared (tree t
)
5339 if (IS_TYPE_OR_DECL_P (t
)
5340 || TREE_CODE (t
) == SSA_NAME
5341 || TREE_CODE (t
) == IDENTIFIER_NODE
5342 || TREE_CODE (t
) == CASE_LABEL_EXPR
5343 || is_gimple_min_invariant (t
))
5346 if (t
== error_mark_node
)
5352 /* Called via walk_tree. Verify tree sharing. */
5355 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
5357 hash_set
<void *> *visited
= (hash_set
<void *> *) data
;
5359 if (tree_node_can_be_shared (*tp
))
5361 *walk_subtrees
= false;
5365 if (visited
->add (*tp
))
5371 /* Called via walk_gimple_stmt. Verify tree sharing. */
5374 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
5376 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5377 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
5380 static bool eh_error_found
;
5382 verify_eh_throw_stmt_node (gimple
*const &stmt
, const int &,
5383 hash_set
<gimple
*> *visited
)
5385 if (!visited
->contains (stmt
))
5387 error ("dead statement in EH table");
5388 debug_gimple_stmt (stmt
);
5389 eh_error_found
= true;
5394 /* Verify if the location LOCs block is in BLOCKS. */
5397 verify_location (hash_set
<tree
> *blocks
, location_t loc
)
5399 tree block
= LOCATION_BLOCK (loc
);
5400 if (block
!= NULL_TREE
5401 && !blocks
->contains (block
))
5403 error ("location references block not in block tree");
5406 if (block
!= NULL_TREE
)
5407 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
5411 /* Called via walk_tree. Verify that expressions have no blocks. */
5414 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
5418 *walk_subtrees
= false;
5422 location_t loc
= EXPR_LOCATION (*tp
);
5423 if (LOCATION_BLOCK (loc
) != NULL
)
5429 /* Called via walk_tree. Verify locations of expressions. */
5432 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
5434 hash_set
<tree
> *blocks
= (hash_set
<tree
> *) data
;
5437 /* ??? This doesn't really belong here but there's no good place to
5438 stick this remainder of old verify_expr. */
5439 /* ??? This barfs on debug stmts which contain binds to vars with
5440 different function context. */
5443 || TREE_CODE (t
) == PARM_DECL
5444 || TREE_CODE (t
) == RESULT_DECL
)
5446 tree context
= decl_function_context (t
);
5447 if (context
!= cfun
->decl
5448 && !SCOPE_FILE_SCOPE_P (context
)
5450 && !DECL_EXTERNAL (t
))
5452 error ("local declaration from a different function");
5458 if (VAR_P (t
) && DECL_HAS_DEBUG_EXPR_P (t
))
5460 tree x
= DECL_DEBUG_EXPR (t
);
5461 tree addr
= walk_tree (&x
, verify_expr_no_block
, NULL
, NULL
);
5466 || TREE_CODE (t
) == PARM_DECL
5467 || TREE_CODE (t
) == RESULT_DECL
)
5468 && DECL_HAS_VALUE_EXPR_P (t
))
5470 tree x
= DECL_VALUE_EXPR (t
);
5471 tree addr
= walk_tree (&x
, verify_expr_no_block
, NULL
, NULL
);
5478 *walk_subtrees
= false;
5482 location_t loc
= EXPR_LOCATION (t
);
5483 if (verify_location (blocks
, loc
))
5489 /* Called via walk_gimple_op. Verify locations of expressions. */
5492 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
5494 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5495 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
5498 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5501 collect_subblocks (hash_set
<tree
> *blocks
, tree block
)
5504 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
5507 collect_subblocks (blocks
, t
);
5511 /* Disable warnings about missing quoting in GCC diagnostics for
5512 the verification errors. Their format strings don't follow
5513 GCC diagnostic conventions and trigger an ICE in the end. */
5515 # pragma GCC diagnostic push
5516 # pragma GCC diagnostic ignored "-Wformat-diag"
5519 /* Verify the GIMPLE statements in the CFG of FN. */
5522 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
, bool ice
)
5527 timevar_push (TV_TREE_STMT_VERIFY
);
5528 hash_set
<void *> visited
;
5529 hash_set
<gimple
*> visited_throwing_stmts
;
5531 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5532 hash_set
<tree
> blocks
;
5533 if (DECL_INITIAL (fn
->decl
))
5535 blocks
.add (DECL_INITIAL (fn
->decl
));
5536 collect_subblocks (&blocks
, DECL_INITIAL (fn
->decl
));
5539 FOR_EACH_BB_FN (bb
, fn
)
5541 gimple_stmt_iterator gsi
;
5545 for (gphi_iterator gpi
= gsi_start_phis (bb
);
5549 gphi
*phi
= gpi
.phi ();
5553 if (gimple_bb (phi
) != bb
)
5555 error ("gimple_bb (phi) is set to a wrong basic block");
5559 err2
|= verify_gimple_phi (phi
);
5561 /* Only PHI arguments have locations. */
5562 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
5564 error ("PHI node with location");
5568 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5570 tree arg
= gimple_phi_arg_def (phi
, i
);
5571 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
5575 error ("incorrect sharing of tree nodes");
5576 debug_generic_expr (addr
);
5579 location_t loc
= gimple_phi_arg_location (phi
, i
);
5580 if (virtual_operand_p (gimple_phi_result (phi
))
5581 && loc
!= UNKNOWN_LOCATION
)
5583 error ("virtual PHI with argument locations");
5586 addr
= walk_tree (&arg
, verify_expr_location_1
, &blocks
, NULL
);
5589 debug_generic_expr (addr
);
5592 err2
|= verify_location (&blocks
, loc
);
5596 debug_gimple_stmt (phi
);
5600 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5602 gimple
*stmt
= gsi_stmt (gsi
);
5604 struct walk_stmt_info wi
;
5608 if (gimple_bb (stmt
) != bb
)
5610 error ("gimple_bb (stmt) is set to a wrong basic block");
5614 err2
|= verify_gimple_stmt (stmt
);
5615 err2
|= verify_location (&blocks
, gimple_location (stmt
));
5617 memset (&wi
, 0, sizeof (wi
));
5618 wi
.info
= (void *) &visited
;
5619 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
5622 error ("incorrect sharing of tree nodes");
5623 debug_generic_expr (addr
);
5627 memset (&wi
, 0, sizeof (wi
));
5628 wi
.info
= (void *) &blocks
;
5629 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
5632 debug_generic_expr (addr
);
5636 /* If the statement is marked as part of an EH region, then it is
5637 expected that the statement could throw. Verify that when we
5638 have optimizations that simplify statements such that we prove
5639 that they cannot throw, that we update other data structures
5641 lp_nr
= lookup_stmt_eh_lp (stmt
);
5643 visited_throwing_stmts
.add (stmt
);
5646 if (!stmt_could_throw_p (cfun
, stmt
))
5650 error ("statement marked for throw, but doesn%'t");
5654 else if (!gsi_one_before_end_p (gsi
))
5656 error ("statement marked for throw in middle of block");
5662 debug_gimple_stmt (stmt
);
5666 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5667 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
5668 err
|= verify_location (&blocks
, e
->goto_locus
);
5671 hash_map
<gimple
*, int> *eh_table
= get_eh_throw_stmt_table (cfun
);
5672 eh_error_found
= false;
5674 eh_table
->traverse
<hash_set
<gimple
*> *, verify_eh_throw_stmt_node
>
5675 (&visited_throwing_stmts
);
5677 if (ice
&& (err
|| eh_error_found
))
5678 internal_error ("verify_gimple failed");
5680 verify_histograms ();
5681 timevar_pop (TV_TREE_STMT_VERIFY
);
5683 return (err
|| eh_error_found
);
5687 /* Verifies that the flow information is OK. */
5690 gimple_verify_flow_info (void)
5694 gimple_stmt_iterator gsi
;
5699 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5700 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5702 error ("ENTRY_BLOCK has IL associated with it");
5706 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5707 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5709 error ("EXIT_BLOCK has IL associated with it");
5713 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5714 if (e
->flags
& EDGE_FALLTHRU
)
5716 error ("fallthru to exit from bb %d", e
->src
->index
);
5719 if (cfun
->cfg
->full_profile
5720 && !ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
.initialized_p ())
5722 error ("entry block count not initialized");
5725 if (cfun
->cfg
->full_profile
5726 && !EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
.initialized_p ())
5728 error ("exit block count not initialized");
5731 if (cfun
->cfg
->full_profile
5732 && !single_succ_edge
5733 (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->probability
.initialized_p ())
5735 error ("probability of edge from entry block not initialized");
5740 FOR_EACH_BB_FN (bb
, cfun
)
5742 bool found_ctrl_stmt
= false;
5746 if (cfun
->cfg
->full_profile
)
5748 if (!bb
->count
.initialized_p ())
5750 error ("count of bb %d not initialized", bb
->index
);
5753 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5754 if (!e
->probability
.initialized_p ())
5756 error ("probability of edge %d->%d not initialized",
5757 bb
->index
, e
->dest
->index
);
5762 /* Skip labels on the start of basic block. */
5763 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5766 gimple
*prev_stmt
= stmt
;
5768 stmt
= gsi_stmt (gsi
);
5770 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5773 label
= gimple_label_label (as_a
<glabel
*> (stmt
));
5774 if (prev_stmt
&& DECL_NONLOCAL (label
))
5776 error ("nonlocal label %qD is not first in a sequence "
5777 "of labels in bb %d", label
, bb
->index
);
5781 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5783 error ("EH landing pad label %qD is not first in a sequence "
5784 "of labels in bb %d", label
, bb
->index
);
5788 if (label_to_block (cfun
, label
) != bb
)
5790 error ("label %qD to block does not match in bb %d",
5795 if (decl_function_context (label
) != current_function_decl
)
5797 error ("label %qD has incorrect context in bb %d",
5803 /* Verify that body of basic block BB is free of control flow. */
5804 bool seen_nondebug_stmt
= false;
5805 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5807 gimple
*stmt
= gsi_stmt (gsi
);
5809 /* Do NOT disregard debug stmts after found_ctrl_stmt. */
5810 if (found_ctrl_stmt
)
5812 error ("control flow in the middle of basic block %d",
5817 if (stmt_ends_bb_p (stmt
))
5818 found_ctrl_stmt
= true;
5820 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
5822 error ("label %qD in the middle of basic block %d",
5823 gimple_label_label (label_stmt
), bb
->index
);
5827 /* Check that no statements appear between a returns_twice call
5828 and its associated abnormal edge. */
5829 if (gimple_code (stmt
) == GIMPLE_CALL
5830 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
5832 bool misplaced
= false;
5833 /* TM is an exception: it points abnormal edges just after the
5834 call that starts a transaction, i.e. it must end the BB. */
5835 if (gimple_call_builtin_p (stmt
, BUILT_IN_TM_START
))
5837 if (single_succ_p (bb
)
5838 && bb_has_abnormal_pred (single_succ (bb
))
5839 && !gsi_one_nondebug_before_end_p (gsi
))
5841 error ("returns_twice call is not last in basic block "
5848 if (seen_nondebug_stmt
&& bb_has_abnormal_pred (bb
))
5850 error ("returns_twice call is not first in basic block "
5857 print_gimple_stmt (stderr
, stmt
, 0, TDF_SLIM
);
5861 if (!is_gimple_debug (stmt
))
5862 seen_nondebug_stmt
= true;
5865 gsi
= gsi_last_nondebug_bb (bb
);
5866 if (gsi_end_p (gsi
))
5869 stmt
= gsi_stmt (gsi
);
5871 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5874 if (verify_eh_edges (stmt
))
5877 if (is_ctrl_stmt (stmt
))
5879 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5880 if (e
->flags
& EDGE_FALLTHRU
)
5882 error ("fallthru edge after a control statement in bb %d",
5888 if (gimple_code (stmt
) != GIMPLE_COND
)
5890 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5891 after anything else but if statement. */
5892 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5893 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5895 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5901 switch (gimple_code (stmt
))
5908 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5912 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5913 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5914 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5915 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5916 || EDGE_COUNT (bb
->succs
) >= 3)
5918 error ("wrong outgoing edge flags at end of bb %d",
5926 if (simple_goto_p (stmt
))
5928 error ("explicit goto at end of bb %d", bb
->index
);
5933 /* FIXME. We should double check that the labels in the
5934 destination blocks have their address taken. */
5935 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5936 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5937 | EDGE_FALSE_VALUE
))
5938 || !(e
->flags
& EDGE_ABNORMAL
))
5940 error ("wrong outgoing edge flags at end of bb %d",
5948 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5952 if (!single_succ_p (bb
)
5953 || (single_succ_edge (bb
)->flags
5954 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5955 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5957 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5960 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5962 error ("return edge does not point to exit in bb %d",
5970 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5975 n
= gimple_switch_num_labels (switch_stmt
);
5977 /* Mark all the destination basic blocks. */
5978 for (i
= 0; i
< n
; ++i
)
5980 basic_block label_bb
= gimple_switch_label_bb (cfun
, switch_stmt
, i
);
5981 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5982 label_bb
->aux
= (void *)1;
5985 /* Verify that the case labels are sorted. */
5986 prev
= gimple_switch_label (switch_stmt
, 0);
5987 for (i
= 1; i
< n
; ++i
)
5989 tree c
= gimple_switch_label (switch_stmt
, i
);
5992 error ("found default case not at the start of "
5998 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
6000 error ("case labels not sorted: ");
6001 print_generic_expr (stderr
, prev
);
6002 fprintf (stderr
," is greater than ");
6003 print_generic_expr (stderr
, c
);
6004 fprintf (stderr
," but comes before it.\n");
6009 /* VRP will remove the default case if it can prove it will
6010 never be executed. So do not verify there always exists
6011 a default case here. */
6013 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6017 error ("extra outgoing edge %d->%d",
6018 bb
->index
, e
->dest
->index
);
6022 e
->dest
->aux
= (void *)2;
6023 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
6024 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
6026 error ("wrong outgoing edge flags at end of bb %d",
6032 /* Check that we have all of them. */
6033 for (i
= 0; i
< n
; ++i
)
6035 basic_block label_bb
= gimple_switch_label_bb (cfun
,
6038 if (label_bb
->aux
!= (void *)2)
6040 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
6045 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6046 e
->dest
->aux
= (void *)0;
6050 case GIMPLE_EH_DISPATCH
:
6051 if (verify_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
)))
6060 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
6061 verify_dominators (CDI_DOMINATORS
);
6067 # pragma GCC diagnostic pop
6070 /* Updates phi nodes after creating a forwarder block joined
6071 by edge FALLTHRU. */
6074 gimple_make_forwarder_block (edge fallthru
)
6078 basic_block dummy
, bb
;
6081 bool forward_location_p
;
6083 dummy
= fallthru
->src
;
6084 bb
= fallthru
->dest
;
6086 if (single_pred_p (bb
))
6089 /* We can forward location info if we have only one predecessor. */
6090 forward_location_p
= single_pred_p (dummy
);
6092 /* If we redirected a branch we must create new PHI nodes at the
6094 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6096 gphi
*phi
, *new_phi
;
6099 var
= gimple_phi_result (phi
);
6100 new_phi
= create_phi_node (var
, bb
);
6101 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
6102 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
6104 ? gimple_phi_arg_location (phi
, 0) : UNKNOWN_LOCATION
);
6107 /* Add the arguments we have stored on edges. */
6108 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
6113 flush_pending_stmts (e
);
6118 /* Return a non-special label in the head of basic block BLOCK.
6119 Create one if it doesn't exist. */
6122 gimple_block_label (basic_block bb
)
6124 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
6129 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
6131 stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
6134 label
= gimple_label_label (stmt
);
6135 if (!DECL_NONLOCAL (label
))
6138 gsi_move_before (&i
, &s
);
6143 label
= create_artificial_label (UNKNOWN_LOCATION
);
6144 stmt
= gimple_build_label (label
);
6145 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
6150 /* Attempt to perform edge redirection by replacing a possibly complex
6151 jump instruction by a goto or by removing the jump completely.
6152 This can apply only if all edges now point to the same block. The
6153 parameters and return values are equivalent to
6154 redirect_edge_and_branch. */
6157 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
6159 basic_block src
= e
->src
;
6160 gimple_stmt_iterator i
;
6163 /* We can replace or remove a complex jump only when we have exactly
6165 if (EDGE_COUNT (src
->succs
) != 2
6166 /* Verify that all targets will be TARGET. Specifically, the
6167 edge that is not E must also go to TARGET. */
6168 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
6171 i
= gsi_last_bb (src
);
6175 stmt
= gsi_stmt (i
);
6177 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
6179 gsi_remove (&i
, true);
6180 e
= ssa_redirect_edge (e
, target
);
6181 e
->flags
= EDGE_FALLTHRU
;
6189 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
6190 edge representing the redirected branch. */
6193 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
6195 basic_block bb
= e
->src
;
6196 gimple_stmt_iterator gsi
;
6200 if (e
->flags
& EDGE_ABNORMAL
)
6203 if (e
->dest
== dest
)
6206 if (e
->flags
& EDGE_EH
)
6207 return redirect_eh_edge (e
, dest
);
6209 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
6211 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
6216 gsi
= gsi_last_nondebug_bb (bb
);
6217 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
6219 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
6222 /* For COND_EXPR, we only need to redirect the edge. */
6226 /* No non-abnormal edges should lead from a non-simple goto, and
6227 simple ones should be represented implicitly. */
6232 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
6233 tree label
= gimple_block_label (dest
);
6234 tree cases
= get_cases_for_edge (e
, switch_stmt
);
6236 /* If we have a list of cases associated with E, then use it
6237 as it's a lot faster than walking the entire case vector. */
6240 edge e2
= find_edge (e
->src
, dest
);
6247 CASE_LABEL (cases
) = label
;
6248 cases
= CASE_CHAIN (cases
);
6251 /* If there was already an edge in the CFG, then we need
6252 to move all the cases associated with E to E2. */
6255 tree cases2
= get_cases_for_edge (e2
, switch_stmt
);
6257 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
6258 CASE_CHAIN (cases2
) = first
;
6260 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
6264 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
6266 for (i
= 0; i
< n
; i
++)
6268 tree elt
= gimple_switch_label (switch_stmt
, i
);
6269 if (label_to_block (cfun
, CASE_LABEL (elt
)) == e
->dest
)
6270 CASE_LABEL (elt
) = label
;
6278 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
6279 int i
, n
= gimple_asm_nlabels (asm_stmt
);
6282 for (i
= 0; i
< n
; ++i
)
6284 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
6285 if (label_to_block (cfun
, TREE_VALUE (cons
)) == e
->dest
)
6288 label
= gimple_block_label (dest
);
6289 TREE_VALUE (cons
) = label
;
6293 /* If we didn't find any label matching the former edge in the
6294 asm labels, we must be redirecting the fallthrough
6296 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
6301 gsi_remove (&gsi
, true);
6302 e
->flags
|= EDGE_FALLTHRU
;
6305 case GIMPLE_OMP_RETURN
:
6306 case GIMPLE_OMP_CONTINUE
:
6307 case GIMPLE_OMP_SECTIONS_SWITCH
:
6308 case GIMPLE_OMP_FOR
:
6309 /* The edges from OMP constructs can be simply redirected. */
6312 case GIMPLE_EH_DISPATCH
:
6313 if (!(e
->flags
& EDGE_FALLTHRU
))
6314 redirect_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
), e
, dest
);
6317 case GIMPLE_TRANSACTION
:
6318 if (e
->flags
& EDGE_TM_ABORT
)
6319 gimple_transaction_set_label_over (as_a
<gtransaction
*> (stmt
),
6320 gimple_block_label (dest
));
6321 else if (e
->flags
& EDGE_TM_UNINSTRUMENTED
)
6322 gimple_transaction_set_label_uninst (as_a
<gtransaction
*> (stmt
),
6323 gimple_block_label (dest
));
6325 gimple_transaction_set_label_norm (as_a
<gtransaction
*> (stmt
),
6326 gimple_block_label (dest
));
6330 /* Otherwise it must be a fallthru edge, and we don't need to
6331 do anything besides redirecting it. */
6332 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
6336 /* Update/insert PHI nodes as necessary. */
6338 /* Now update the edges in the CFG. */
6339 e
= ssa_redirect_edge (e
, dest
);
6344 /* Returns true if it is possible to remove edge E by redirecting
6345 it to the destination of the other edge from E->src. */
6348 gimple_can_remove_branch_p (const_edge e
)
6350 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
6356 /* Simple wrapper, as we can always redirect fallthru edges. */
6359 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
6361 e
= gimple_redirect_edge_and_branch (e
, dest
);
6368 /* Splits basic block BB after statement STMT (but at least after the
6369 labels). If STMT is NULL, BB is split just after the labels. */
6372 gimple_split_block (basic_block bb
, void *stmt
)
6374 gimple_stmt_iterator gsi
;
6375 gimple_stmt_iterator gsi_tgt
;
6381 new_bb
= create_empty_bb (bb
);
6383 /* Redirect the outgoing edges. */
6384 new_bb
->succs
= bb
->succs
;
6386 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
6389 /* Get a stmt iterator pointing to the first stmt to move. */
6390 if (!stmt
|| gimple_code ((gimple
*) stmt
) == GIMPLE_LABEL
)
6391 gsi
= gsi_after_labels (bb
);
6394 gsi
= gsi_for_stmt ((gimple
*) stmt
);
6398 /* Move everything from GSI to the new basic block. */
6399 if (gsi_end_p (gsi
))
6402 /* Split the statement list - avoid re-creating new containers as this
6403 brings ugly quadratic memory consumption in the inliner.
6404 (We are still quadratic since we need to update stmt BB pointers,
6406 gsi_split_seq_before (&gsi
, &list
);
6407 set_bb_seq (new_bb
, list
);
6408 for (gsi_tgt
= gsi_start (list
);
6409 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
6410 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
6416 /* Moves basic block BB after block AFTER. */
6419 gimple_move_block_after (basic_block bb
, basic_block after
)
6421 if (bb
->prev_bb
== after
)
6425 link_block (bb
, after
);
6431 /* Return TRUE if block BB has no executable statements, otherwise return
6435 gimple_empty_block_p (basic_block bb
)
6437 /* BB must have no executable statements. */
6438 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
6441 while (!gsi_end_p (gsi
))
6443 gimple
*stmt
= gsi_stmt (gsi
);
6444 if (is_gimple_debug (stmt
))
6446 else if (gimple_code (stmt
) == GIMPLE_NOP
6447 || gimple_code (stmt
) == GIMPLE_PREDICT
)
6457 /* Split a basic block if it ends with a conditional branch and if the
6458 other part of the block is not empty. */
6461 gimple_split_block_before_cond_jump (basic_block bb
)
6463 gimple
*last
, *split_point
;
6464 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
6465 if (gsi_end_p (gsi
))
6467 last
= gsi_stmt (gsi
);
6468 if (gimple_code (last
) != GIMPLE_COND
6469 && gimple_code (last
) != GIMPLE_SWITCH
)
6472 split_point
= gsi_stmt (gsi
);
6473 return split_block (bb
, split_point
)->dest
;
6477 /* Return true if basic_block can be duplicated. */
6480 gimple_can_duplicate_bb_p (const_basic_block bb
)
6482 gimple
*last
= last_nondebug_stmt (CONST_CAST_BB (bb
));
6484 /* Do checks that can only fail for the last stmt, to minimize the work in the
6487 /* A transaction is a single entry multiple exit region. It
6488 must be duplicated in its entirety or not at all. */
6489 if (gimple_code (last
) == GIMPLE_TRANSACTION
)
6492 /* An IFN_UNIQUE call must be duplicated as part of its group,
6494 if (is_gimple_call (last
)
6495 && gimple_call_internal_p (last
)
6496 && gimple_call_internal_unique_p (last
))
6500 for (gimple_stmt_iterator gsi
= gsi_start_bb (CONST_CAST_BB (bb
));
6501 !gsi_end_p (gsi
); gsi_next (&gsi
))
6503 gimple
*g
= gsi_stmt (gsi
);
6505 /* Prohibit duplication of returns_twice calls, otherwise associated
6506 abnormal edges also need to be duplicated properly.
6507 An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6508 duplicated as part of its group, or not at all.
6509 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6510 group, so the same holds there. */
6511 if (is_gimple_call (g
)
6512 && (gimple_call_flags (g
) & ECF_RETURNS_TWICE
6513 || gimple_call_internal_p (g
, IFN_GOMP_SIMT_ENTER_ALLOC
)
6514 || gimple_call_internal_p (g
, IFN_GOMP_SIMT_EXIT
)
6515 || gimple_call_internal_p (g
, IFN_GOMP_SIMT_VOTE_ANY
)
6516 || gimple_call_internal_p (g
, IFN_GOMP_SIMT_XCHG_BFLY
)
6517 || gimple_call_internal_p (g
, IFN_GOMP_SIMT_XCHG_IDX
)))
6524 /* Create a duplicate of the basic block BB. NOTE: This does not
6525 preserve SSA form. */
6528 gimple_duplicate_bb (basic_block bb
, copy_bb_data
*id
)
6531 gimple_stmt_iterator gsi_tgt
;
6533 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
6535 /* Copy the PHI nodes. We ignore PHI node arguments here because
6536 the incoming edges have not been setup yet. */
6537 for (gphi_iterator gpi
= gsi_start_phis (bb
);
6543 copy
= create_phi_node (NULL_TREE
, new_bb
);
6544 create_new_def_for (gimple_phi_result (phi
), copy
,
6545 gimple_phi_result_ptr (copy
));
6546 gimple_set_uid (copy
, gimple_uid (phi
));
6549 gsi_tgt
= gsi_start_bb (new_bb
);
6550 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
6554 def_operand_p def_p
;
6555 ssa_op_iter op_iter
;
6557 gimple
*stmt
, *copy
;
6559 stmt
= gsi_stmt (gsi
);
6560 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6563 /* Don't duplicate label debug stmts. */
6564 if (gimple_debug_bind_p (stmt
)
6565 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
6569 /* Create a new copy of STMT and duplicate STMT's virtual
6571 copy
= gimple_copy (stmt
);
6572 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
6574 maybe_duplicate_eh_stmt (copy
, stmt
);
6575 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
6577 /* When copying around a stmt writing into a local non-user
6578 aggregate, make sure it won't share stack slot with other
6580 lhs
= gimple_get_lhs (stmt
);
6581 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
6583 tree base
= get_base_address (lhs
);
6585 && (VAR_P (base
) || TREE_CODE (base
) == RESULT_DECL
)
6586 && DECL_IGNORED_P (base
)
6587 && !TREE_STATIC (base
)
6588 && !DECL_EXTERNAL (base
)
6589 && (!VAR_P (base
) || !DECL_HAS_VALUE_EXPR_P (base
)))
6590 DECL_NONSHAREABLE (base
) = 1;
6593 /* If requested remap dependence info of cliques brought in
6596 for (unsigned i
= 0; i
< gimple_num_ops (copy
); ++i
)
6598 tree op
= gimple_op (copy
, i
);
6601 if (TREE_CODE (op
) == ADDR_EXPR
6602 || TREE_CODE (op
) == WITH_SIZE_EXPR
)
6603 op
= TREE_OPERAND (op
, 0);
6604 while (handled_component_p (op
))
6605 op
= TREE_OPERAND (op
, 0);
6606 if ((TREE_CODE (op
) == MEM_REF
6607 || TREE_CODE (op
) == TARGET_MEM_REF
)
6608 && MR_DEPENDENCE_CLIQUE (op
) > 1
6609 && MR_DEPENDENCE_CLIQUE (op
) != bb
->loop_father
->owned_clique
)
6611 if (!id
->dependence_map
)
6612 id
->dependence_map
= new hash_map
<dependence_hash
,
6615 unsigned short &newc
= id
->dependence_map
->get_or_insert
6616 (MR_DEPENDENCE_CLIQUE (op
), &existed
);
6619 gcc_assert (MR_DEPENDENCE_CLIQUE (op
) <= cfun
->last_clique
);
6620 newc
= get_new_clique (cfun
);
6622 MR_DEPENDENCE_CLIQUE (op
) = newc
;
6626 /* Create new names for all the definitions created by COPY and
6627 add replacement mappings for each new name. */
6628 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
6629 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
6635 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6638 add_phi_args_after_copy_edge (edge e_copy
)
6640 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
6643 gphi
*phi
, *phi_copy
;
6645 gphi_iterator psi
, psi_copy
;
6647 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
6650 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
6652 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
6653 dest
= get_bb_original (e_copy
->dest
);
6655 dest
= e_copy
->dest
;
6657 e
= find_edge (bb
, dest
);
6660 /* During loop unrolling the target of the latch edge is copied.
6661 In this case we are not looking for edge to dest, but to
6662 duplicated block whose original was dest. */
6663 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6665 if ((e
->dest
->flags
& BB_DUPLICATED
)
6666 && get_bb_original (e
->dest
) == dest
)
6670 gcc_assert (e
!= NULL
);
6673 for (psi
= gsi_start_phis (e
->dest
),
6674 psi_copy
= gsi_start_phis (e_copy
->dest
);
6676 gsi_next (&psi
), gsi_next (&psi_copy
))
6679 phi_copy
= psi_copy
.phi ();
6680 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
6681 add_phi_arg (phi_copy
, def
, e_copy
,
6682 gimple_phi_arg_location_from_edge (phi
, e
));
6687 /* Basic block BB_COPY was created by code duplication. Add phi node
6688 arguments for edges going out of BB_COPY. The blocks that were
6689 duplicated have BB_DUPLICATED set. */
6692 add_phi_args_after_copy_bb (basic_block bb_copy
)
6697 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
6699 add_phi_args_after_copy_edge (e_copy
);
6703 /* Blocks in REGION_COPY array of length N_REGION were created by
6704 duplication of basic blocks. Add phi node arguments for edges
6705 going from these blocks. If E_COPY is not NULL, also add
6706 phi node arguments for its destination.*/
6709 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
6714 for (i
= 0; i
< n_region
; i
++)
6715 region_copy
[i
]->flags
|= BB_DUPLICATED
;
6717 for (i
= 0; i
< n_region
; i
++)
6718 add_phi_args_after_copy_bb (region_copy
[i
]);
6720 add_phi_args_after_copy_edge (e_copy
);
6722 for (i
= 0; i
< n_region
; i
++)
6723 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
6726 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6727 important exit edge EXIT. By important we mean that no SSA name defined
6728 inside region is live over the other exit edges of the region. All entry
6729 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6730 to the duplicate of the region. Dominance and loop information is
6731 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6732 UPDATE_DOMINANCE is false then we assume that the caller will update the
6733 dominance information after calling this function. The new basic
6734 blocks are stored to REGION_COPY in the same order as they had in REGION,
6735 provided that REGION_COPY is not NULL.
6736 The function returns false if it is unable to copy the region,
6739 It is callers responsibility to update profile. */
6742 gimple_duplicate_seme_region (edge entry
, edge exit
,
6743 basic_block
*region
, unsigned n_region
,
6744 basic_block
*region_copy
,
6745 bool update_dominance
)
6748 bool free_region_copy
= false, copying_header
= false;
6749 class loop
*loop
= entry
->dest
->loop_father
;
6753 if (!can_copy_bbs_p (region
, n_region
))
6756 /* Some sanity checking. Note that we do not check for all possible
6757 missuses of the functions. I.e. if you ask to copy something weird,
6758 it will work, but the state of structures probably will not be
6760 for (i
= 0; i
< n_region
; i
++)
6762 /* We do not handle subloops, i.e. all the blocks must belong to the
6764 if (region
[i
]->loop_father
!= loop
)
6767 if (region
[i
] != entry
->dest
6768 && region
[i
] == loop
->header
)
6772 /* In case the function is used for loop header copying (which is the primary
6773 use), ensure that EXIT and its copy will be new latch and entry edges. */
6774 if (loop
->header
== entry
->dest
)
6776 copying_header
= true;
6778 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
6781 for (i
= 0; i
< n_region
; i
++)
6782 if (region
[i
] != exit
->src
6783 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
6787 initialize_original_copy_tables ();
6790 set_loop_copy (loop
, loop_outer (loop
));
6792 set_loop_copy (loop
, loop
);
6796 region_copy
= XNEWVEC (basic_block
, n_region
);
6797 free_region_copy
= true;
6800 /* Record blocks outside the region that are dominated by something
6802 auto_vec
<basic_block
> doms
;
6803 if (update_dominance
)
6804 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6806 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
6807 split_edge_bb_loc (entry
), update_dominance
);
6811 loop
->header
= exit
->dest
;
6812 loop
->latch
= exit
->src
;
6815 /* Redirect the entry and add the phi node arguments. */
6816 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
6817 gcc_assert (redirected
!= NULL
);
6818 flush_pending_stmts (entry
);
6820 /* Concerning updating of dominators: We must recount dominators
6821 for entry block and its copy. Anything that is outside of the
6822 region, but was dominated by something inside needs recounting as
6824 if (update_dominance
)
6826 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6827 doms
.safe_push (get_bb_original (entry
->dest
));
6828 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6831 /* Add the other PHI node arguments. */
6832 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6834 if (free_region_copy
)
6837 free_original_copy_tables ();
6841 /* Checks if BB is part of the region defined by N_REGION BBS. */
6843 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6847 for (n
= 0; n
< n_region
; n
++)
6856 /* For each PHI in BB, copy the argument associated with SRC_E to TGT_E.
6857 Assuming the argument exists, just does not have a value. */
6860 copy_phi_arg_into_existing_phi (edge src_e
, edge tgt_e
)
6862 int src_idx
= src_e
->dest_idx
;
6863 int tgt_idx
= tgt_e
->dest_idx
;
6865 /* Iterate over each PHI in e->dest. */
6866 for (gphi_iterator gsi
= gsi_start_phis (src_e
->dest
),
6867 gsi2
= gsi_start_phis (tgt_e
->dest
);
6869 gsi_next (&gsi
), gsi_next (&gsi2
))
6871 gphi
*src_phi
= gsi
.phi ();
6872 gphi
*dest_phi
= gsi2
.phi ();
6873 tree val
= gimple_phi_arg_def (src_phi
, src_idx
);
6874 location_t locus
= gimple_phi_arg_location (src_phi
, src_idx
);
6876 SET_PHI_ARG_DEF (dest_phi
, tgt_idx
, val
);
6877 gimple_phi_arg_set_location (dest_phi
, tgt_idx
, locus
);
6881 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6882 are stored to REGION_COPY in the same order in that they appear
6883 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6884 the region, EXIT an exit from it. The condition guarding EXIT
6885 is moved to ENTRY. Returns true if duplication succeeds, false
6911 gimple_duplicate_sese_tail (edge entry
, edge exit
,
6912 basic_block
*region
, unsigned n_region
,
6913 basic_block
*region_copy
)
6916 bool free_region_copy
= false;
6917 class loop
*loop
= exit
->dest
->loop_father
;
6918 class loop
*orig_loop
= entry
->dest
->loop_father
;
6919 basic_block switch_bb
, entry_bb
, nentry_bb
;
6920 profile_count total_count
= profile_count::uninitialized (),
6921 exit_count
= profile_count::uninitialized ();
6922 edge exits
[2], nexits
[2], e
;
6923 gimple_stmt_iterator gsi
;
6925 basic_block exit_bb
;
6926 class loop
*target
, *aloop
, *cloop
;
6928 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6930 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6932 if (!can_copy_bbs_p (region
, n_region
))
6935 initialize_original_copy_tables ();
6936 set_loop_copy (orig_loop
, loop
);
6939 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6941 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6943 cloop
= duplicate_loop (aloop
, target
);
6944 duplicate_subloops (aloop
, cloop
);
6950 region_copy
= XNEWVEC (basic_block
, n_region
);
6951 free_region_copy
= true;
6954 gcc_assert (!need_ssa_update_p (cfun
));
6956 /* Record blocks outside the region that are dominated by something
6958 auto_vec
<basic_block
> doms
= get_dominated_by_region (CDI_DOMINATORS
, region
,
6961 total_count
= exit
->src
->count
;
6962 exit_count
= exit
->count ();
6963 /* Fix up corner cases, to avoid division by zero or creation of negative
6965 if (exit_count
> total_count
)
6966 exit_count
= total_count
;
6968 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6969 split_edge_bb_loc (exit
), true);
6970 if (total_count
.initialized_p () && exit_count
.initialized_p ())
6972 scale_bbs_frequencies_profile_count (region
, n_region
,
6973 total_count
- exit_count
,
6975 scale_bbs_frequencies_profile_count (region_copy
, n_region
, exit_count
,
6979 /* Create the switch block, and put the exit condition to it. */
6980 entry_bb
= entry
->dest
;
6981 nentry_bb
= get_bb_copy (entry_bb
);
6982 if (!*gsi_last_bb (entry
->src
)
6983 || !stmt_ends_bb_p (*gsi_last_bb (entry
->src
)))
6984 switch_bb
= entry
->src
;
6986 switch_bb
= split_edge (entry
);
6987 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6989 gcond
*cond_stmt
= as_a
<gcond
*> (*gsi_last_bb (exit
->src
));
6990 cond_stmt
= as_a
<gcond
*> (gimple_copy (cond_stmt
));
6992 gsi
= gsi_last_bb (switch_bb
);
6993 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6995 sorig
= single_succ_edge (switch_bb
);
6996 sorig
->flags
= exits
[1]->flags
;
6997 sorig
->probability
= exits
[1]->probability
;
6998 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6999 snew
->probability
= exits
[0]->probability
;
7002 /* Register the new edge from SWITCH_BB in loop exit lists. */
7003 rescan_loop_exit (snew
, true, false);
7005 /* Add the PHI node arguments. */
7006 add_phi_args_after_copy (region_copy
, n_region
, snew
);
7008 /* Get rid of now superfluous conditions and associated edges (and phi node
7010 exit_bb
= exit
->dest
;
7012 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
7013 PENDING_STMT (e
) = NULL
;
7015 /* The latch of ORIG_LOOP was copied, and so was the backedge
7016 to the original header. We redirect this backedge to EXIT_BB. */
7017 for (i
= 0; i
< n_region
; i
++)
7018 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
7020 gcc_assert (single_succ_edge (region_copy
[i
]));
7021 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
7022 PENDING_STMT (e
) = NULL
;
7023 copy_phi_arg_into_existing_phi (nexits
[0], e
);
7025 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
7026 PENDING_STMT (e
) = NULL
;
7028 /* Anything that is outside of the region, but was dominated by something
7029 inside needs to update dominance info. */
7030 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
7032 if (free_region_copy
)
7035 free_original_copy_tables ();
7039 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
7040 adding blocks when the dominator traversal reaches EXIT. This
7041 function silently assumes that ENTRY strictly dominates EXIT. */
7044 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
7045 vec
<basic_block
> *bbs_p
)
7049 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
7051 son
= next_dom_son (CDI_DOMINATORS
, son
))
7053 bbs_p
->safe_push (son
);
7055 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
7059 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
7060 The duplicates are recorded in VARS_MAP. */
7063 replace_by_duplicate_decl (tree
*tp
, hash_map
<tree
, tree
> *vars_map
,
7066 tree t
= *tp
, new_t
;
7067 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
7069 if (DECL_CONTEXT (t
) == to_context
)
7073 tree
&loc
= vars_map
->get_or_insert (t
, &existed
);
7079 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
7080 add_local_decl (f
, new_t
);
7084 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
7085 new_t
= copy_node (t
);
7087 DECL_CONTEXT (new_t
) = to_context
;
7098 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
7099 VARS_MAP maps old ssa names and var_decls to the new ones. */
7102 replace_ssa_name (tree name
, hash_map
<tree
, tree
> *vars_map
,
7107 gcc_assert (!virtual_operand_p (name
));
7109 tree
*loc
= vars_map
->get (name
);
7113 tree decl
= SSA_NAME_VAR (name
);
7116 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name
));
7117 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
7118 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
7119 decl
, SSA_NAME_DEF_STMT (name
));
7122 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
7123 name
, SSA_NAME_DEF_STMT (name
));
7125 /* Now that we've used the def stmt to define new_name, make sure it
7126 doesn't define name anymore. */
7127 SSA_NAME_DEF_STMT (name
) = NULL
;
7129 vars_map
->put (name
, new_name
);
7143 hash_map
<tree
, tree
> *vars_map
;
7144 htab_t new_label_map
;
7145 hash_map
<void *, void *> *eh_map
;
7149 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
7150 contained in *TP if it has been ORIG_BLOCK previously and change the
7151 DECL_CONTEXT of every local variable referenced in *TP. */
7154 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
7156 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
7157 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
7162 tree block
= TREE_BLOCK (t
);
7163 if (block
== NULL_TREE
)
7165 else if (block
== p
->orig_block
7166 || p
->orig_block
== NULL_TREE
)
7168 /* tree_node_can_be_shared says we can share invariant
7169 addresses but unshare_expr copies them anyways. Make sure
7170 to unshare before adjusting the block in place - we do not
7171 always see a copy here. */
7172 if (TREE_CODE (t
) == ADDR_EXPR
7173 && is_gimple_min_invariant (t
))
7174 *tp
= t
= unshare_expr (t
);
7175 TREE_SET_BLOCK (t
, p
->new_block
);
7177 else if (flag_checking
)
7179 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
7180 block
= BLOCK_SUPERCONTEXT (block
);
7181 gcc_assert (block
== p
->orig_block
);
7184 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
7186 if (TREE_CODE (t
) == SSA_NAME
)
7187 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
7188 else if (TREE_CODE (t
) == PARM_DECL
7189 && gimple_in_ssa_p (cfun
))
7190 *tp
= *(p
->vars_map
->get (t
));
7191 else if (TREE_CODE (t
) == LABEL_DECL
)
7193 if (p
->new_label_map
)
7195 struct tree_map in
, *out
;
7197 out
= (struct tree_map
*)
7198 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
7203 /* For FORCED_LABELs we can end up with references from other
7204 functions if some SESE regions are outlined. It is UB to
7205 jump in between them, but they could be used just for printing
7206 addresses etc. In that case, DECL_CONTEXT on the label should
7207 be the function containing the glabel stmt with that LABEL_DECL,
7208 rather than whatever function a reference to the label was seen
7210 if (!FORCED_LABEL (t
) && !DECL_NONLOCAL (t
))
7211 DECL_CONTEXT (t
) = p
->to_context
;
7213 else if (p
->remap_decls_p
)
7215 /* Replace T with its duplicate. T should no longer appear in the
7216 parent function, so this looks wasteful; however, it may appear
7217 in referenced_vars, and more importantly, as virtual operands of
7218 statements, and in alias lists of other variables. It would be
7219 quite difficult to expunge it from all those places. ??? It might
7220 suffice to do this for addressable variables. */
7221 if ((VAR_P (t
) && !is_global_var (t
))
7222 || TREE_CODE (t
) == CONST_DECL
)
7223 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
7227 else if (TYPE_P (t
))
7233 /* Helper for move_stmt_r. Given an EH region number for the source
7234 function, map that to the duplicate EH regio number in the dest. */
7237 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
7239 eh_region old_r
, new_r
;
7241 old_r
= get_eh_region_from_number (old_nr
);
7242 new_r
= static_cast<eh_region
> (*p
->eh_map
->get (old_r
));
7244 return new_r
->index
;
7247 /* Similar, but operate on INTEGER_CSTs. */
7250 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
7254 old_nr
= tree_to_shwi (old_t_nr
);
7255 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
7257 return build_int_cst (integer_type_node
, new_nr
);
7260 /* Like move_stmt_op, but for gimple statements.
7262 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
7263 contained in the current statement in *GSI_P and change the
7264 DECL_CONTEXT of every local variable referenced in the current
7268 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
7269 struct walk_stmt_info
*wi
)
7271 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
7272 gimple
*stmt
= gsi_stmt (*gsi_p
);
7273 tree block
= gimple_block (stmt
);
7275 if (block
== p
->orig_block
7276 || (p
->orig_block
== NULL_TREE
7277 && block
!= NULL_TREE
))
7278 gimple_set_block (stmt
, p
->new_block
);
7280 switch (gimple_code (stmt
))
7283 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7285 tree r
, fndecl
= gimple_call_fndecl (stmt
);
7286 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
7287 switch (DECL_FUNCTION_CODE (fndecl
))
7289 case BUILT_IN_EH_COPY_VALUES
:
7290 r
= gimple_call_arg (stmt
, 1);
7291 r
= move_stmt_eh_region_tree_nr (r
, p
);
7292 gimple_call_set_arg (stmt
, 1, r
);
7295 case BUILT_IN_EH_POINTER
:
7296 case BUILT_IN_EH_FILTER
:
7297 r
= gimple_call_arg (stmt
, 0);
7298 r
= move_stmt_eh_region_tree_nr (r
, p
);
7299 gimple_call_set_arg (stmt
, 0, r
);
7310 gresx
*resx_stmt
= as_a
<gresx
*> (stmt
);
7311 int r
= gimple_resx_region (resx_stmt
);
7312 r
= move_stmt_eh_region_nr (r
, p
);
7313 gimple_resx_set_region (resx_stmt
, r
);
7317 case GIMPLE_EH_DISPATCH
:
7319 geh_dispatch
*eh_dispatch_stmt
= as_a
<geh_dispatch
*> (stmt
);
7320 int r
= gimple_eh_dispatch_region (eh_dispatch_stmt
);
7321 r
= move_stmt_eh_region_nr (r
, p
);
7322 gimple_eh_dispatch_set_region (eh_dispatch_stmt
, r
);
7326 case GIMPLE_OMP_RETURN
:
7327 case GIMPLE_OMP_CONTINUE
:
7332 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7333 so that such labels can be referenced from other regions.
7334 Make sure to update it when seeing a GIMPLE_LABEL though,
7335 that is the owner of the label. */
7336 walk_gimple_op (stmt
, move_stmt_op
, wi
);
7337 *handled_ops_p
= true;
7338 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
7339 if (FORCED_LABEL (label
) || DECL_NONLOCAL (label
))
7340 DECL_CONTEXT (label
) = p
->to_context
;
7345 if (is_gimple_omp (stmt
))
7347 /* Do not remap variables inside OMP directives. Variables
7348 referenced in clauses and directive header belong to the
7349 parent function and should not be moved into the child
7351 bool save_remap_decls_p
= p
->remap_decls_p
;
7352 p
->remap_decls_p
= false;
7353 *handled_ops_p
= true;
7355 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
7358 p
->remap_decls_p
= save_remap_decls_p
;
7366 /* Move basic block BB from function CFUN to function DEST_FN. The
7367 block is moved out of the original linked list and placed after
7368 block AFTER in the new list. Also, the block is removed from the
7369 original array of blocks and placed in DEST_FN's array of blocks.
7370 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
7371 updated to reflect the moved edges.
7373 The local variables are remapped to new instances, VARS_MAP is used
7374 to record the mapping. */
7377 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
7378 basic_block after
, bool update_edge_count_p
,
7379 struct move_stmt_d
*d
)
7381 struct control_flow_graph
*cfg
;
7384 gimple_stmt_iterator si
;
7387 /* Remove BB from dominance structures. */
7388 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
7390 /* Move BB from its current loop to the copy in the new function. */
7393 class loop
*new_loop
= (class loop
*)bb
->loop_father
->aux
;
7395 bb
->loop_father
= new_loop
;
7398 /* Link BB to the new linked list. */
7399 move_block_after (bb
, after
);
7401 /* Update the edge count in the corresponding flowgraphs. */
7402 if (update_edge_count_p
)
7403 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7405 cfun
->cfg
->x_n_edges
--;
7406 dest_cfun
->cfg
->x_n_edges
++;
7409 /* Remove BB from the original basic block array. */
7410 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
7411 cfun
->cfg
->x_n_basic_blocks
--;
7413 /* Grow DEST_CFUN's basic block array if needed. */
7414 cfg
= dest_cfun
->cfg
;
7415 cfg
->x_n_basic_blocks
++;
7416 if (bb
->index
>= cfg
->x_last_basic_block
)
7417 cfg
->x_last_basic_block
= bb
->index
+ 1;
7419 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
7420 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
7421 vec_safe_grow_cleared (cfg
->x_basic_block_info
,
7422 cfg
->x_last_basic_block
+ 1);
7424 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
7426 /* Remap the variables in phi nodes. */
7427 for (gphi_iterator psi
= gsi_start_phis (bb
);
7430 gphi
*phi
= psi
.phi ();
7432 tree op
= PHI_RESULT (phi
);
7436 if (virtual_operand_p (op
))
7438 /* Remove the phi nodes for virtual operands (alias analysis will be
7439 run for the new function, anyway). But replace all uses that
7440 might be outside of the region we move. */
7441 use_operand_p use_p
;
7442 imm_use_iterator iter
;
7444 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, op
)
7445 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
7446 SET_USE (use_p
, SSA_NAME_VAR (op
));
7447 remove_phi_node (&psi
, true);
7451 SET_PHI_RESULT (phi
,
7452 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
7453 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
7455 op
= USE_FROM_PTR (use
);
7456 if (TREE_CODE (op
) == SSA_NAME
)
7457 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
7460 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
7462 location_t locus
= gimple_phi_arg_location (phi
, i
);
7463 tree block
= LOCATION_BLOCK (locus
);
7465 if (locus
== UNKNOWN_LOCATION
)
7467 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
7469 locus
= set_block (locus
, d
->new_block
);
7470 gimple_phi_arg_set_location (phi
, i
, locus
);
7477 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
7479 gimple
*stmt
= gsi_stmt (si
);
7480 struct walk_stmt_info wi
;
7482 memset (&wi
, 0, sizeof (wi
));
7484 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
7486 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
7488 tree label
= gimple_label_label (label_stmt
);
7489 int uid
= LABEL_DECL_UID (label
);
7491 gcc_assert (uid
> -1);
7493 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
7494 if (old_len
<= (unsigned) uid
)
7495 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, uid
+ 1);
7497 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
7498 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
7500 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
7502 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
7503 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
7506 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
7507 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
7509 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
7510 gimple_remove_stmt_histograms (cfun
, stmt
);
7512 /* We cannot leave any operands allocated from the operand caches of
7513 the current function. */
7514 free_stmt_operands (cfun
, stmt
);
7515 push_cfun (dest_cfun
);
7517 if (is_gimple_call (stmt
))
7518 notice_special_calls (as_a
<gcall
*> (stmt
));
7522 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7523 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
7525 tree block
= LOCATION_BLOCK (e
->goto_locus
);
7526 if (d
->orig_block
== NULL_TREE
7527 || block
== d
->orig_block
)
7528 e
->goto_locus
= set_block (e
->goto_locus
, d
->new_block
);
7532 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7533 the outermost EH region. Use REGION as the incoming base EH region.
7534 If there is no single outermost region, return NULL and set *ALL to
7538 find_outermost_region_in_block (struct function
*src_cfun
,
7539 basic_block bb
, eh_region region
,
7542 gimple_stmt_iterator si
;
7544 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
7546 gimple
*stmt
= gsi_stmt (si
);
7547 eh_region stmt_region
;
7550 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
7551 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
7555 region
= stmt_region
;
7556 else if (stmt_region
!= region
)
7558 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
7572 new_label_mapper (tree decl
, void *data
)
7574 htab_t hash
= (htab_t
) data
;
7578 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
7580 m
= XNEW (struct tree_map
);
7581 m
->hash
= DECL_UID (decl
);
7582 m
->base
.from
= decl
;
7583 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
7584 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
7585 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
7586 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
7588 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
7589 gcc_assert (*slot
== NULL
);
7596 /* Tree walker to replace the decls used inside value expressions by
7600 replace_block_vars_by_duplicates_1 (tree
*tp
, int *walk_subtrees
, void *data
)
7602 struct replace_decls_d
*rd
= (struct replace_decls_d
*)data
;
7604 switch (TREE_CODE (*tp
))
7609 replace_by_duplicate_decl (tp
, rd
->vars_map
, rd
->to_context
);
7615 if (IS_TYPE_OR_DECL_P (*tp
))
7616 *walk_subtrees
= false;
7621 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7625 replace_block_vars_by_duplicates (tree block
, hash_map
<tree
, tree
> *vars_map
,
7630 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
7633 if (!VAR_P (t
) && TREE_CODE (t
) != CONST_DECL
)
7635 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
7638 if (VAR_P (*tp
) && DECL_HAS_VALUE_EXPR_P (*tp
))
7640 tree x
= DECL_VALUE_EXPR (*tp
);
7641 struct replace_decls_d rd
= { vars_map
, to_context
};
7643 walk_tree (&x
, replace_block_vars_by_duplicates_1
, &rd
, NULL
);
7644 SET_DECL_VALUE_EXPR (t
, x
);
7645 DECL_HAS_VALUE_EXPR_P (t
) = 1;
7647 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
7652 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
7653 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
7656 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7660 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
7663 /* Discard it from the old loop array. */
7664 (*get_loops (fn1
))[loop
->num
] = NULL
;
7666 /* Place it in the new loop array, assigning it a new number. */
7667 loop
->num
= number_of_loops (fn2
);
7668 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
7670 /* Recurse to children. */
7671 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
7672 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
7675 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7676 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7679 verify_sese (basic_block entry
, basic_block exit
, vec
<basic_block
> *bbs_p
)
7684 bitmap bbs
= BITMAP_ALLOC (NULL
);
7687 gcc_assert (entry
!= NULL
);
7688 gcc_assert (entry
!= exit
);
7689 gcc_assert (bbs_p
!= NULL
);
7691 gcc_assert (bbs_p
->length () > 0);
7693 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
7694 bitmap_set_bit (bbs
, bb
->index
);
7696 gcc_assert (bitmap_bit_p (bbs
, entry
->index
));
7697 gcc_assert (exit
== NULL
|| bitmap_bit_p (bbs
, exit
->index
));
7699 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
7703 gcc_assert (single_pred_p (entry
));
7704 gcc_assert (!bitmap_bit_p (bbs
, single_pred (entry
)->index
));
7707 for (ei
= ei_start (bb
->preds
); !ei_end_p (ei
); ei_next (&ei
))
7710 gcc_assert (bitmap_bit_p (bbs
, e
->src
->index
));
7715 gcc_assert (single_succ_p (exit
));
7716 gcc_assert (!bitmap_bit_p (bbs
, single_succ (exit
)->index
));
7719 for (ei
= ei_start (bb
->succs
); !ei_end_p (ei
); ei_next (&ei
))
7722 gcc_assert (bitmap_bit_p (bbs
, e
->dest
->index
));
7729 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7732 gather_ssa_name_hash_map_from (tree
const &from
, tree
const &, void *data
)
7734 bitmap release_names
= (bitmap
)data
;
7736 if (TREE_CODE (from
) != SSA_NAME
)
7739 bitmap_set_bit (release_names
, SSA_NAME_VERSION (from
));
7743 /* Return LOOP_DIST_ALIAS call if present in BB. */
7746 find_loop_dist_alias (basic_block bb
)
7748 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
7749 if (!safe_is_a
<gcond
*> (*gsi
))
7753 if (gsi_end_p (gsi
))
7756 gimple
*g
= gsi_stmt (gsi
);
7757 if (gimple_call_internal_p (g
, IFN_LOOP_DIST_ALIAS
))
7762 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7763 to VALUE and update any immediate uses of it's LHS. */
7766 fold_loop_internal_call (gimple
*g
, tree value
)
7768 tree lhs
= gimple_call_lhs (g
);
7769 use_operand_p use_p
;
7770 imm_use_iterator iter
;
7772 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
7774 replace_call_with_value (&gsi
, value
);
7775 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, lhs
)
7777 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
7778 SET_USE (use_p
, value
);
7779 update_stmt (use_stmt
);
7780 /* If we turn conditional to constant, scale profile counts.
7781 We know that the conditional was created by loop distribution
7782 and all basic blocks dominated by the taken edge are part of
7783 the loop distributed. */
7784 if (gimple_code (use_stmt
) == GIMPLE_COND
)
7786 edge true_edge
, false_edge
;
7787 extract_true_false_edges_from_block (gimple_bb (use_stmt
),
7788 &true_edge
, &false_edge
);
7789 edge taken_edge
= NULL
, other_edge
= NULL
;
7790 if (gimple_cond_true_p (as_a
<gcond
*>(use_stmt
)))
7792 taken_edge
= true_edge
;
7793 other_edge
= false_edge
;
7795 else if (gimple_cond_false_p (as_a
<gcond
*>(use_stmt
)))
7797 taken_edge
= false_edge
;
7798 other_edge
= true_edge
;
7801 && !(taken_edge
->probability
== profile_probability::always ()))
7803 profile_count old_count
= taken_edge
->count ();
7804 profile_count new_count
= taken_edge
->src
->count
;
7805 taken_edge
->probability
= profile_probability::always ();
7806 other_edge
->probability
= profile_probability::never ();
7807 /* If we have multiple predecessors, we can't use the dominance
7808 test. This should not happen as the guarded code should
7809 start with pre-header. */
7810 gcc_assert (single_pred_edge (taken_edge
->dest
));
7811 if (old_count
.nonzero_p ())
7813 taken_edge
->dest
->count
7814 = taken_edge
->dest
->count
.apply_scale (new_count
,
7816 scale_strictly_dominated_blocks (taken_edge
->dest
,
7817 new_count
, old_count
);
7824 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7825 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7826 single basic block in the original CFG and the new basic block is
7827 returned. DEST_CFUN must not have a CFG yet.
7829 Note that the region need not be a pure SESE region. Blocks inside
7830 the region may contain calls to abort/exit. The only restriction
7831 is that ENTRY_BB should be the only entry point and it must
7834 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7835 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7836 to the new function.
7838 All local variables referenced in the region are assumed to be in
7839 the corresponding BLOCK_VARS and unexpanded variable lists
7840 associated with DEST_CFUN.
7842 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7843 reimplement move_sese_region_to_fn by duplicating the region rather than
7847 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
7848 basic_block exit_bb
, tree orig_block
)
7850 vec
<basic_block
> bbs
;
7851 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
7852 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
7853 struct function
*saved_cfun
= cfun
;
7854 int *entry_flag
, *exit_flag
;
7855 profile_probability
*entry_prob
, *exit_prob
;
7856 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
7859 htab_t new_label_map
;
7860 hash_map
<void *, void *> *eh_map
;
7861 class loop
*loop
= entry_bb
->loop_father
;
7862 class loop
*loop0
= get_loop (saved_cfun
, 0);
7863 struct move_stmt_d d
;
7865 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7867 gcc_assert (entry_bb
!= exit_bb
7869 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
7871 /* Collect all the blocks in the region. Manually add ENTRY_BB
7872 because it won't be added by dfs_enumerate_from. */
7874 bbs
.safe_push (entry_bb
);
7875 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
7878 verify_sese (entry_bb
, exit_bb
, &bbs
);
7880 /* The blocks that used to be dominated by something in BBS will now be
7881 dominated by the new block. */
7882 auto_vec
<basic_block
> dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
7886 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7887 the predecessor edges to ENTRY_BB and the successor edges to
7888 EXIT_BB so that we can re-attach them to the new basic block that
7889 will replace the region. */
7890 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
7891 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
7892 entry_flag
= XNEWVEC (int, num_entry_edges
);
7893 entry_prob
= XNEWVEC (profile_probability
, num_entry_edges
);
7895 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
7897 entry_prob
[i
] = e
->probability
;
7898 entry_flag
[i
] = e
->flags
;
7899 entry_pred
[i
++] = e
->src
;
7905 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
7906 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
7907 exit_flag
= XNEWVEC (int, num_exit_edges
);
7908 exit_prob
= XNEWVEC (profile_probability
, num_exit_edges
);
7910 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
7912 exit_prob
[i
] = e
->probability
;
7913 exit_flag
[i
] = e
->flags
;
7914 exit_succ
[i
++] = e
->dest
;
7926 /* Switch context to the child function to initialize DEST_FN's CFG. */
7927 gcc_assert (dest_cfun
->cfg
== NULL
);
7928 push_cfun (dest_cfun
);
7930 init_empty_tree_cfg ();
7932 /* Initialize EH information for the new function. */
7934 new_label_map
= NULL
;
7937 eh_region region
= NULL
;
7940 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7942 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
, &all
);
7947 init_eh_for_function ();
7948 if (region
!= NULL
|| all
)
7950 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
7951 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
7952 new_label_mapper
, new_label_map
);
7956 /* Initialize an empty loop tree. */
7957 struct loops
*loops
= ggc_cleared_alloc
<struct loops
> ();
7958 init_loops_structure (dest_cfun
, loops
, 1);
7959 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
7960 set_loops_for_fn (dest_cfun
, loops
);
7962 vec
<loop_p
, va_gc
> *larray
= get_loops (saved_cfun
)->copy ();
7964 /* Move the outlined loop tree part. */
7965 num_nodes
= bbs
.length ();
7966 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7968 if (bb
->loop_father
->header
== bb
)
7970 class loop
*this_loop
= bb
->loop_father
;
7971 /* Avoid the need to remap SSA names used in nb_iterations. */
7972 free_numbers_of_iterations_estimates (this_loop
);
7973 class loop
*outer
= loop_outer (this_loop
);
7975 /* If the SESE region contains some bbs ending with
7976 a noreturn call, those are considered to belong
7977 to the outermost loop in saved_cfun, rather than
7978 the entry_bb's loop_father. */
7982 num_nodes
-= this_loop
->num_nodes
;
7983 flow_loop_tree_node_remove (bb
->loop_father
);
7984 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
7985 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
7988 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
7991 /* Remove loop exits from the outlined region. */
7992 if (loops_for_fn (saved_cfun
)->exits
)
7993 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7995 struct loops
*l
= loops_for_fn (saved_cfun
);
7997 = l
->exits
->find_slot_with_hash (e
, htab_hash_pointer (e
),
8000 l
->exits
->clear_slot (slot
);
8004 /* Adjust the number of blocks in the tree root of the outlined part. */
8005 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
8007 /* Setup a mapping to be used by move_block_to_fn. */
8008 loop
->aux
= current_loops
->tree_root
;
8009 loop0
->aux
= current_loops
->tree_root
;
8011 /* Fix up orig_loop_num. If the block referenced in it has been moved
8012 to dest_cfun, update orig_loop_num field, otherwise clear it. */
8013 signed char *moved_orig_loop_num
= NULL
;
8014 for (auto dloop
: loops_list (dest_cfun
, 0))
8015 if (dloop
->orig_loop_num
)
8017 if (moved_orig_loop_num
== NULL
)
8019 = XCNEWVEC (signed char, vec_safe_length (larray
));
8020 if ((*larray
)[dloop
->orig_loop_num
] != NULL
8021 && get_loop (saved_cfun
, dloop
->orig_loop_num
) == NULL
)
8023 if (moved_orig_loop_num
[dloop
->orig_loop_num
] >= 0
8024 && moved_orig_loop_num
[dloop
->orig_loop_num
] < 2)
8025 moved_orig_loop_num
[dloop
->orig_loop_num
]++;
8026 dloop
->orig_loop_num
= (*larray
)[dloop
->orig_loop_num
]->num
;
8030 moved_orig_loop_num
[dloop
->orig_loop_num
] = -1;
8031 dloop
->orig_loop_num
= 0;
8036 if (moved_orig_loop_num
)
8038 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
8040 gimple
*g
= find_loop_dist_alias (bb
);
8044 int orig_loop_num
= tree_to_shwi (gimple_call_arg (g
, 0));
8045 gcc_assert (orig_loop_num
8046 && (unsigned) orig_loop_num
< vec_safe_length (larray
));
8047 if (moved_orig_loop_num
[orig_loop_num
] == 2)
8049 /* If we have moved both loops with this orig_loop_num into
8050 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
8051 too, update the first argument. */
8052 gcc_assert ((*larray
)[orig_loop_num
] != NULL
8053 && (get_loop (saved_cfun
, orig_loop_num
) == NULL
));
8054 tree t
= build_int_cst (integer_type_node
,
8055 (*larray
)[orig_loop_num
]->num
);
8056 gimple_call_set_arg (g
, 0, t
);
8058 /* Make sure the following loop will not update it. */
8059 moved_orig_loop_num
[orig_loop_num
] = 0;
8062 /* Otherwise at least one of the loops stayed in saved_cfun.
8063 Remove the LOOP_DIST_ALIAS call. */
8064 fold_loop_internal_call (g
, gimple_call_arg (g
, 1));
8066 FOR_EACH_BB_FN (bb
, saved_cfun
)
8068 gimple
*g
= find_loop_dist_alias (bb
);
8071 int orig_loop_num
= tree_to_shwi (gimple_call_arg (g
, 0));
8072 gcc_assert (orig_loop_num
8073 && (unsigned) orig_loop_num
< vec_safe_length (larray
));
8074 if (moved_orig_loop_num
[orig_loop_num
])
8075 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
8076 of the corresponding loops was moved, remove it. */
8077 fold_loop_internal_call (g
, gimple_call_arg (g
, 1));
8079 XDELETEVEC (moved_orig_loop_num
);
8083 /* Move blocks from BBS into DEST_CFUN. */
8084 gcc_assert (bbs
.length () >= 2);
8085 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
8086 hash_map
<tree
, tree
> vars_map
;
8088 memset (&d
, 0, sizeof (d
));
8089 d
.orig_block
= orig_block
;
8090 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
8091 d
.from_context
= cfun
->decl
;
8092 d
.to_context
= dest_cfun
->decl
;
8093 d
.vars_map
= &vars_map
;
8094 d
.new_label_map
= new_label_map
;
8096 d
.remap_decls_p
= true;
8098 if (gimple_in_ssa_p (cfun
))
8099 for (tree arg
= DECL_ARGUMENTS (d
.to_context
); arg
; arg
= DECL_CHAIN (arg
))
8101 tree narg
= make_ssa_name_fn (dest_cfun
, arg
, gimple_build_nop ());
8102 set_ssa_default_def (dest_cfun
, arg
, narg
);
8103 vars_map
.put (arg
, narg
);
8106 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
8108 /* No need to update edge counts on the last block. It has
8109 already been updated earlier when we detached the region from
8110 the original CFG. */
8111 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
8115 /* Adjust the maximum clique used. */
8116 dest_cfun
->last_clique
= saved_cfun
->last_clique
;
8120 /* Loop sizes are no longer correct, fix them up. */
8121 loop
->num_nodes
-= num_nodes
;
8122 for (class loop
*outer
= loop_outer (loop
);
8123 outer
; outer
= loop_outer (outer
))
8124 outer
->num_nodes
-= num_nodes
;
8125 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
8127 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vectorize_loops
)
8130 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
8135 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
8137 dest_cfun
->has_simduid_loops
= true;
8139 if (aloop
->force_vectorize
)
8140 dest_cfun
->has_force_vectorize_loops
= true;
8144 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
8148 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
8150 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
8151 = BLOCK_SUBBLOCKS (orig_block
);
8152 for (block
= BLOCK_SUBBLOCKS (orig_block
);
8153 block
; block
= BLOCK_CHAIN (block
))
8154 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
8155 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
8158 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
8159 &vars_map
, dest_cfun
->decl
);
8162 htab_delete (new_label_map
);
8166 /* We need to release ssa-names in a defined order, so first find them,
8167 and then iterate in ascending version order. */
8168 bitmap release_names
= BITMAP_ALLOC (NULL
);
8169 vars_map
.traverse
<void *, gather_ssa_name_hash_map_from
> (release_names
);
8171 EXECUTE_IF_SET_IN_BITMAP (release_names
, 0, i
, bi
)
8172 release_ssa_name (ssa_name (i
));
8173 BITMAP_FREE (release_names
);
8175 /* Rewire the entry and exit blocks. The successor to the entry
8176 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
8177 the child function. Similarly, the predecessor of DEST_FN's
8178 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
8179 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
8180 various CFG manipulation function get to the right CFG.
8182 FIXME, this is silly. The CFG ought to become a parameter to
8184 push_cfun (dest_cfun
);
8185 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= entry_bb
->count
;
8186 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
8189 make_single_succ_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
8190 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
= exit_bb
->count
;
8193 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
= profile_count::zero ();
8196 /* Back in the original function, the SESE region has disappeared,
8197 create a new basic block in its place. */
8198 bb
= create_empty_bb (entry_pred
[0]);
8200 add_bb_to_loop (bb
, loop
);
8201 profile_count count
= profile_count::zero ();
8202 for (i
= 0; i
< num_entry_edges
; i
++)
8204 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
8205 e
->probability
= entry_prob
[i
];
8206 count
+= e
->count ();
8210 for (i
= 0; i
< num_exit_edges
; i
++)
8212 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
8213 e
->probability
= exit_prob
[i
];
8216 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
8217 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
8218 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
8234 /* Dump default def DEF to file FILE using FLAGS and indentation
8238 dump_default_def (FILE *file
, tree def
, int spc
, dump_flags_t flags
)
8240 for (int i
= 0; i
< spc
; ++i
)
8241 fprintf (file
, " ");
8242 dump_ssaname_info_to_file (file
, def
, spc
);
8244 print_generic_expr (file
, TREE_TYPE (def
), flags
);
8245 fprintf (file
, " ");
8246 print_generic_expr (file
, def
, flags
);
8247 fprintf (file
, " = ");
8248 print_generic_expr (file
, SSA_NAME_VAR (def
), flags
);
8249 fprintf (file
, ";\n");
8252 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
8255 print_no_sanitize_attr_value (FILE *file
, tree value
)
8257 unsigned int flags
= tree_to_uhwi (value
);
8259 for (int i
= 0; sanitizer_opts
[i
].name
!= NULL
; ++i
)
8261 if ((sanitizer_opts
[i
].flag
& flags
) == sanitizer_opts
[i
].flag
)
8264 fprintf (file
, " | ");
8265 fprintf (file
, "%s", sanitizer_opts
[i
].name
);
8271 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
8275 dump_function_to_file (tree fndecl
, FILE *file
, dump_flags_t flags
)
8277 tree arg
, var
, old_current_fndecl
= current_function_decl
;
8278 struct function
*dsf
;
8279 bool ignore_topmost_bind
= false, any_var
= false;
8282 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
8283 && decl_is_tm_clone (fndecl
));
8284 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
8286 tree fntype
= TREE_TYPE (fndecl
);
8287 tree attrs
[] = { DECL_ATTRIBUTES (fndecl
), TYPE_ATTRIBUTES (fntype
) };
8289 for (int i
= 0; i
!= 2; ++i
)
8294 fprintf (file
, "__attribute__((");
8298 for (chain
= attrs
[i
]; chain
; first
= false, chain
= TREE_CHAIN (chain
))
8301 fprintf (file
, ", ");
8303 tree name
= get_attribute_name (chain
);
8304 print_generic_expr (file
, name
, dump_flags
);
8305 if (TREE_VALUE (chain
) != NULL_TREE
)
8307 fprintf (file
, " (");
8309 if (strstr (IDENTIFIER_POINTER (name
), "no_sanitize"))
8310 print_no_sanitize_attr_value (file
, TREE_VALUE (chain
));
8311 else if (!strcmp (IDENTIFIER_POINTER (name
),
8312 "omp declare variant base"))
8314 tree a
= TREE_VALUE (chain
);
8315 print_generic_expr (file
, TREE_PURPOSE (a
), dump_flags
);
8316 fprintf (file
, " match ");
8317 print_omp_context_selector (file
, TREE_VALUE (a
),
8321 print_generic_expr (file
, TREE_VALUE (chain
), dump_flags
);
8322 fprintf (file
, ")");
8326 fprintf (file
, "))\n");
8329 current_function_decl
= fndecl
;
8330 if (flags
& TDF_GIMPLE
)
8332 static bool hotness_bb_param_printed
= false;
8333 if (profile_info
!= NULL
8334 && !hotness_bb_param_printed
)
8336 hotness_bb_param_printed
= true;
8338 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8339 " */\n", get_hot_bb_threshold ());
8342 print_generic_expr (file
, TREE_TYPE (TREE_TYPE (fndecl
)),
8343 dump_flags
| TDF_SLIM
);
8344 fprintf (file
, " __GIMPLE (%s",
8345 (fun
->curr_properties
& PROP_ssa
) ? "ssa"
8346 : (fun
->curr_properties
& PROP_cfg
) ? "cfg"
8349 if (fun
&& fun
->cfg
)
8351 basic_block bb
= ENTRY_BLOCK_PTR_FOR_FN (fun
);
8352 if (bb
->count
.initialized_p ())
8353 fprintf (file
, ",%s(%" PRIu64
")",
8354 profile_quality_as_string (bb
->count
.quality ()),
8355 bb
->count
.value ());
8356 if (dump_flags
& TDF_UID
)
8357 fprintf (file
, ")\n%sD_%u (", function_name (fun
),
8360 fprintf (file
, ")\n%s (", function_name (fun
));
8365 print_generic_expr (file
, TREE_TYPE (fntype
), dump_flags
);
8366 if (dump_flags
& TDF_UID
)
8367 fprintf (file
, " %sD.%u %s(", function_name (fun
), DECL_UID (fndecl
),
8368 tmclone
? "[tm-clone] " : "");
8370 fprintf (file
, " %s %s(", function_name (fun
),
8371 tmclone
? "[tm-clone] " : "");
8374 arg
= DECL_ARGUMENTS (fndecl
);
8377 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
8378 fprintf (file
, " ");
8379 print_generic_expr (file
, arg
, dump_flags
);
8380 if (DECL_CHAIN (arg
))
8381 fprintf (file
, ", ");
8382 arg
= DECL_CHAIN (arg
);
8384 fprintf (file
, ")\n");
8386 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
8387 if (dsf
&& (flags
& TDF_EH
))
8388 dump_eh_tree (file
, dsf
);
8390 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
8392 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
8393 current_function_decl
= old_current_fndecl
;
8397 /* When GIMPLE is lowered, the variables are no longer available in
8398 BIND_EXPRs, so display them separately. */
8399 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
8402 ignore_topmost_bind
= true;
8404 fprintf (file
, "{\n");
8405 if (gimple_in_ssa_p (fun
)
8406 && (flags
& TDF_ALIAS
))
8408 for (arg
= DECL_ARGUMENTS (fndecl
); arg
!= NULL
;
8409 arg
= DECL_CHAIN (arg
))
8411 tree def
= ssa_default_def (fun
, arg
);
8413 dump_default_def (file
, def
, 2, flags
);
8416 tree res
= DECL_RESULT (fun
->decl
);
8417 if (res
!= NULL_TREE
8418 && DECL_BY_REFERENCE (res
))
8420 tree def
= ssa_default_def (fun
, res
);
8422 dump_default_def (file
, def
, 2, flags
);
8425 tree static_chain
= fun
->static_chain_decl
;
8426 if (static_chain
!= NULL_TREE
)
8428 tree def
= ssa_default_def (fun
, static_chain
);
8430 dump_default_def (file
, def
, 2, flags
);
8434 if (!vec_safe_is_empty (fun
->local_decls
))
8435 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
8437 print_generic_decl (file
, var
, flags
);
8438 fprintf (file
, "\n");
8445 if (gimple_in_ssa_p (fun
))
8446 FOR_EACH_SSA_NAME (ix
, name
, fun
)
8448 if (!SSA_NAME_VAR (name
)
8449 /* SSA name with decls without a name still get
8450 dumped as _N, list those explicitely as well even
8451 though we've dumped the decl declaration as D.xxx
8453 || !SSA_NAME_IDENTIFIER (name
))
8455 fprintf (file
, " ");
8456 print_generic_expr (file
, TREE_TYPE (name
), flags
);
8457 fprintf (file
, " ");
8458 print_generic_expr (file
, name
, flags
);
8459 fprintf (file
, ";\n");
8466 if (fun
&& fun
->decl
== fndecl
8468 && basic_block_info_for_fn (fun
))
8470 /* If the CFG has been built, emit a CFG-based dump. */
8471 if (!ignore_topmost_bind
)
8472 fprintf (file
, "{\n");
8474 if (any_var
&& n_basic_blocks_for_fn (fun
))
8475 fprintf (file
, "\n");
8477 FOR_EACH_BB_FN (bb
, fun
)
8478 dump_bb (file
, bb
, 2, flags
);
8480 fprintf (file
, "}\n");
8482 else if (fun
&& (fun
->curr_properties
& PROP_gimple_any
))
8484 /* The function is now in GIMPLE form but the CFG has not been
8485 built yet. Emit the single sequence of GIMPLE statements
8486 that make up its body. */
8487 gimple_seq body
= gimple_body (fndecl
);
8489 if (gimple_seq_first_stmt (body
)
8490 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
8491 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
8492 print_gimple_seq (file
, body
, 0, flags
);
8495 if (!ignore_topmost_bind
)
8496 fprintf (file
, "{\n");
8499 fprintf (file
, "\n");
8501 print_gimple_seq (file
, body
, 2, flags
);
8502 fprintf (file
, "}\n");
8509 /* Make a tree based dump. */
8510 chain
= DECL_SAVED_TREE (fndecl
);
8511 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
8513 if (ignore_topmost_bind
)
8515 chain
= BIND_EXPR_BODY (chain
);
8523 if (!ignore_topmost_bind
)
8525 fprintf (file
, "{\n");
8526 /* No topmost bind, pretend it's ignored for later. */
8527 ignore_topmost_bind
= true;
8533 fprintf (file
, "\n");
8535 print_generic_stmt_indented (file
, chain
, flags
, indent
);
8536 if (ignore_topmost_bind
)
8537 fprintf (file
, "}\n");
8540 if (flags
& TDF_ENUMERATE_LOCALS
)
8541 dump_enumerated_decls (file
, flags
);
8542 fprintf (file
, "\n\n");
8544 current_function_decl
= old_current_fndecl
;
8547 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
8550 debug_function (tree fn
, dump_flags_t flags
)
8552 dump_function_to_file (fn
, stderr
, flags
);
8556 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8559 print_pred_bbs (FILE *file
, basic_block bb
)
8564 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
8565 fprintf (file
, "bb_%d ", e
->src
->index
);
8569 /* Print on FILE the indexes for the successors of basic_block BB. */
8572 print_succ_bbs (FILE *file
, basic_block bb
)
8577 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8578 fprintf (file
, "bb_%d ", e
->dest
->index
);
8581 /* Print to FILE the basic block BB following the VERBOSITY level. */
8584 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
8586 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
8587 memset ((void *) s_indent
, ' ', (size_t) indent
);
8588 s_indent
[indent
] = '\0';
8590 /* Print basic_block's header. */
8593 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
8594 print_pred_bbs (file
, bb
);
8595 fprintf (file
, "}, succs = {");
8596 print_succ_bbs (file
, bb
);
8597 fprintf (file
, "})\n");
8600 /* Print basic_block's body. */
8603 fprintf (file
, "%s {\n", s_indent
);
8604 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
8605 fprintf (file
, "%s }\n", s_indent
);
8609 /* Print loop information. */
8612 print_loop_info (FILE *file
, const class loop
*loop
, const char *prefix
)
8614 if (loop
->can_be_parallel
)
8615 fprintf (file
, ", can_be_parallel");
8616 if (loop
->warned_aggressive_loop_optimizations
)
8617 fprintf (file
, ", warned_aggressive_loop_optimizations");
8618 if (loop
->dont_vectorize
)
8619 fprintf (file
, ", dont_vectorize");
8620 if (loop
->force_vectorize
)
8621 fprintf (file
, ", force_vectorize");
8622 if (loop
->in_oacc_kernels_region
)
8623 fprintf (file
, ", in_oacc_kernels_region");
8625 fprintf (file
, ", finite_p");
8627 fprintf (file
, "\n%sunroll %d", prefix
, loop
->unroll
);
8628 if (loop
->nb_iterations
)
8630 fprintf (file
, "\n%sniter ", prefix
);
8631 print_generic_expr (file
, loop
->nb_iterations
);
8634 if (loop
->any_upper_bound
)
8636 fprintf (file
, "\n%supper_bound ", prefix
);
8637 print_decu (loop
->nb_iterations_upper_bound
, file
);
8639 if (loop
->any_likely_upper_bound
)
8641 fprintf (file
, "\n%slikely_upper_bound ", prefix
);
8642 print_decu (loop
->nb_iterations_likely_upper_bound
, file
);
8645 if (loop
->any_estimate
)
8647 fprintf (file
, "\n%sestimate ", prefix
);
8648 print_decu (loop
->nb_iterations_estimate
, file
);
8652 if (loop
->num
&& expected_loop_iterations_by_profile (loop
, &iterations
, &reliable
))
8654 fprintf (file
, "\n%siterations by profile: %f (%s%s) entry count:", prefix
,
8655 iterations
.to_double (), reliable
? "reliable" : "unreliable",
8656 maybe_flat_loop_profile (loop
) ? ", maybe flat" : "");
8657 loop_count_in (loop
).dump (file
, cfun
);
8662 static void print_loop_and_siblings (FILE *, class loop
*, int, int);
8664 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8665 VERBOSITY level this outputs the contents of the loop, or just its
8669 print_loop (FILE *file
, class loop
*loop
, int indent
, int verbosity
)
8677 s_indent
= (char *) alloca ((size_t) indent
+ 1);
8678 memset ((void *) s_indent
, ' ', (size_t) indent
);
8679 s_indent
[indent
] = '\0';
8681 /* Print loop's header. */
8682 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
8684 fprintf (file
, "header = %d", loop
->header
->index
);
8687 fprintf (file
, "deleted)\n");
8691 fprintf (file
, ", latch = %d", loop
->latch
->index
);
8693 fprintf (file
, ", multiple latches");
8694 print_loop_info (file
, loop
, s_indent
);
8695 fprintf (file
, ")\n");
8697 /* Print loop's body. */
8700 fprintf (file
, "%s{\n", s_indent
);
8701 FOR_EACH_BB_FN (bb
, cfun
)
8702 if (bb
->loop_father
== loop
)
8703 print_loops_bb (file
, bb
, indent
, verbosity
);
8705 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
8706 fprintf (file
, "%s}\n", s_indent
);
8710 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8711 spaces. Following VERBOSITY level this outputs the contents of the
8712 loop, or just its structure. */
8715 print_loop_and_siblings (FILE *file
, class loop
*loop
, int indent
,
8721 print_loop (file
, loop
, indent
, verbosity
);
8722 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
8725 /* Follow a CFG edge from the entry point of the program, and on entry
8726 of a loop, pretty print the loop structure on FILE. */
8729 print_loops (FILE *file
, int verbosity
)
8733 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
8734 fprintf (file
, "\nLoops in function: %s\n", current_function_name ());
8735 if (bb
&& bb
->loop_father
)
8736 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
8742 debug (class loop
&ref
)
8744 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
8748 debug (class loop
*ptr
)
8753 fprintf (stderr
, "<nil>\n");
8756 /* Dump a loop verbosely. */
8759 debug_verbose (class loop
&ref
)
8761 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
8765 debug_verbose (class loop
*ptr
)
8770 fprintf (stderr
, "<nil>\n");
8774 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8777 debug_loops (int verbosity
)
8779 print_loops (stderr
, verbosity
);
8782 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8785 debug_loop (class loop
*loop
, int verbosity
)
8787 print_loop (stderr
, loop
, 0, verbosity
);
8790 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8794 debug_loop_num (unsigned num
, int verbosity
)
8796 debug_loop (get_loop (cfun
, num
), verbosity
);
8799 /* Return true if BB ends with a call, possibly followed by some
8800 instructions that must stay with the call. Return false,
8804 gimple_block_ends_with_call_p (basic_block bb
)
8806 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
8807 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
8811 /* Return true if BB ends with a conditional branch. Return false,
8815 gimple_block_ends_with_condjump_p (const_basic_block bb
)
8817 return safe_is_a
<gcond
*> (*gsi_last_bb (const_cast <basic_block
> (bb
)));
8821 /* Return true if statement T may terminate execution of BB in ways not
8822 explicitly represtented in the CFG. */
8825 stmt_can_terminate_bb_p (gimple
*t
)
8827 tree fndecl
= NULL_TREE
;
8830 /* Eh exception not handled internally terminates execution of the whole
8832 if (stmt_can_throw_external (cfun
, t
))
8835 /* NORETURN and LONGJMP calls already have an edge to exit.
8836 CONST and PURE calls do not need one.
8837 We don't currently check for CONST and PURE here, although
8838 it would be a good idea, because those attributes are
8839 figured out from the RTL in mark_constant_function, and
8840 the counter incrementation code from -fprofile-arcs
8841 leads to different results from -fbranch-probabilities. */
8842 if (is_gimple_call (t
))
8844 fndecl
= gimple_call_fndecl (t
);
8845 call_flags
= gimple_call_flags (t
);
8848 if (is_gimple_call (t
)
8850 && fndecl_built_in_p (fndecl
)
8851 && (call_flags
& ECF_NOTHROW
)
8852 && !(call_flags
& ECF_RETURNS_TWICE
)
8853 /* fork() doesn't really return twice, but the effect of
8854 wrapping it in __gcov_fork() which calls __gcov_dump() and
8855 __gcov_reset() and clears the counters before forking has the same
8856 effect as returning twice. Force a fake edge. */
8857 && !fndecl_built_in_p (fndecl
, BUILT_IN_FORK
))
8860 if (is_gimple_call (t
))
8866 if (call_flags
& (ECF_PURE
| ECF_CONST
)
8867 && !(call_flags
& ECF_LOOPING_CONST_OR_PURE
))
8870 /* Function call may do longjmp, terminate program or do other things.
8871 Special case noreturn that have non-abnormal edges out as in this case
8872 the fact is sufficiently represented by lack of edges out of T. */
8873 if (!(call_flags
& ECF_NORETURN
))
8877 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8878 if ((e
->flags
& EDGE_FAKE
) == 0)
8882 if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (t
))
8883 if (gimple_asm_volatile_p (asm_stmt
) || gimple_asm_input_p (asm_stmt
))
8890 /* Add fake edges to the function exit for any non constant and non
8891 noreturn calls (or noreturn calls with EH/abnormal edges),
8892 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8893 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8896 The goal is to expose cases in which entering a basic block does
8897 not imply that all subsequent instructions must be executed. */
8900 gimple_flow_call_edges_add (sbitmap blocks
)
8903 int blocks_split
= 0;
8904 int last_bb
= last_basic_block_for_fn (cfun
);
8905 bool check_last_block
= false;
8907 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
8911 check_last_block
= true;
8913 check_last_block
= bitmap_bit_p (blocks
,
8914 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
8916 /* In the last basic block, before epilogue generation, there will be
8917 a fallthru edge to EXIT. Special care is required if the last insn
8918 of the last basic block is a call because make_edge folds duplicate
8919 edges, which would result in the fallthru edge also being marked
8920 fake, which would result in the fallthru edge being removed by
8921 remove_fake_edges, which would result in an invalid CFG.
8923 Moreover, we can't elide the outgoing fake edge, since the block
8924 profiler needs to take this into account in order to solve the minimal
8925 spanning tree in the case that the call doesn't return.
8927 Handle this by adding a dummy instruction in a new last basic block. */
8928 if (check_last_block
)
8930 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
8931 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
8934 if (!gsi_end_p (gsi
))
8937 if (t
&& stmt_can_terminate_bb_p (t
))
8941 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8944 gsi_insert_on_edge (e
, gimple_build_nop ());
8945 gsi_commit_edge_inserts ();
8950 /* Now add fake edges to the function exit for any non constant
8951 calls since there is no way that we can determine if they will
8953 for (i
= 0; i
< last_bb
; i
++)
8955 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8956 gimple_stmt_iterator gsi
;
8957 gimple
*stmt
, *last_stmt
;
8962 if (blocks
&& !bitmap_bit_p (blocks
, i
))
8965 gsi
= gsi_last_nondebug_bb (bb
);
8966 if (!gsi_end_p (gsi
))
8968 last_stmt
= gsi_stmt (gsi
);
8971 stmt
= gsi_stmt (gsi
);
8972 if (stmt_can_terminate_bb_p (stmt
))
8976 /* The handling above of the final block before the
8977 epilogue should be enough to verify that there is
8978 no edge to the exit block in CFG already.
8979 Calling make_edge in such case would cause us to
8980 mark that edge as fake and remove it later. */
8981 if (flag_checking
&& stmt
== last_stmt
)
8983 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8984 gcc_assert (e
== NULL
);
8987 /* Note that the following may create a new basic block
8988 and renumber the existing basic blocks. */
8989 if (stmt
!= last_stmt
)
8991 e
= split_block (bb
, stmt
);
8995 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
8996 e
->probability
= profile_probability::guessed_never ();
9000 while (!gsi_end_p (gsi
));
9005 checking_verify_flow_info ();
9007 return blocks_split
;
9010 /* Removes edge E and all the blocks dominated by it, and updates dominance
9011 information. The IL in E->src needs to be updated separately.
9012 If dominance info is not available, only the edge E is removed.*/
9015 remove_edge_and_dominated_blocks (edge e
)
9017 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
9020 bool none_removed
= false;
9022 basic_block bb
, dbb
;
9025 /* If we are removing a path inside a non-root loop that may change
9026 loop ownership of blocks or remove loops. Mark loops for fixup. */
9027 class loop
*src_loop
= e
->src
->loop_father
;
9029 && loop_outer (src_loop
) != NULL
9030 && src_loop
== e
->dest
->loop_father
)
9032 loops_state_set (LOOPS_NEED_FIXUP
);
9033 /* If we are removing a backedge clear the number of iterations
9035 class loop
*dest_loop
= e
->dest
->loop_father
;
9036 if (e
->dest
== src_loop
->header
9037 || (e
->dest
== dest_loop
->header
9038 && flow_loop_nested_p (dest_loop
, src_loop
)))
9040 free_numbers_of_iterations_estimates (dest_loop
);
9041 /* If we removed the last backedge mark the loop for removal. */
9042 FOR_EACH_EDGE (f
, ei
, dest_loop
->header
->preds
)
9044 && (f
->src
->loop_father
== dest_loop
9045 || flow_loop_nested_p (dest_loop
, f
->src
->loop_father
)))
9048 mark_loop_for_removal (dest_loop
);
9052 if (!dom_info_available_p (CDI_DOMINATORS
))
9058 /* No updating is needed for edges to exit. */
9059 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
9061 if (cfgcleanup_altered_bbs
)
9062 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
9067 /* First, we find the basic blocks to remove. If E->dest has a predecessor
9068 that is not dominated by E->dest, then this set is empty. Otherwise,
9069 all the basic blocks dominated by E->dest are removed.
9071 Also, to DF_IDOM we store the immediate dominators of the blocks in
9072 the dominance frontier of E (i.e., of the successors of the
9073 removed blocks, if there are any, and of E->dest otherwise). */
9074 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
9079 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
9081 none_removed
= true;
9086 auto_bitmap df
, df_idom
;
9087 auto_vec
<basic_block
> bbs_to_remove
;
9089 bitmap_set_bit (df_idom
,
9090 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
9093 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
9094 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
9096 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
9098 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
9099 bitmap_set_bit (df
, f
->dest
->index
);
9102 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
9103 bitmap_clear_bit (df
, bb
->index
);
9105 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
9107 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
9108 bitmap_set_bit (df_idom
,
9109 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
9113 if (cfgcleanup_altered_bbs
)
9115 /* Record the set of the altered basic blocks. */
9116 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
9117 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
9120 /* Remove E and the cancelled blocks. */
9125 /* Walk backwards so as to get a chance to substitute all
9126 released DEFs into debug stmts. See
9127 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
9129 for (i
= bbs_to_remove
.length (); i
-- > 0; )
9130 delete_basic_block (bbs_to_remove
[i
]);
9133 /* Update the dominance information. The immediate dominator may change only
9134 for blocks whose immediate dominator belongs to DF_IDOM:
9136 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
9137 removal. Let Z the arbitrary block such that idom(Z) = Y and
9138 Z dominates X after the removal. Before removal, there exists a path P
9139 from Y to X that avoids Z. Let F be the last edge on P that is
9140 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
9141 dominates W, and because of P, Z does not dominate W), and W belongs to
9142 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
9143 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
9145 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
9146 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
9148 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
9149 bbs_to_fix_dom
.safe_push (dbb
);
9152 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
9154 bbs_to_fix_dom
.release ();
9157 /* Purge dead EH edges from basic block BB. */
9160 gimple_purge_dead_eh_edges (basic_block bb
)
9162 bool changed
= false;
9165 gimple
*stmt
= *gsi_last_bb (bb
);
9167 if (stmt
&& stmt_can_throw_internal (cfun
, stmt
))
9170 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
9172 if (e
->flags
& EDGE_EH
)
9174 remove_edge_and_dominated_blocks (e
);
9184 /* Purge dead EH edges from basic block listed in BLOCKS. */
9187 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
9189 bool changed
= false;
9193 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
9195 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
9197 /* Earlier gimple_purge_dead_eh_edges could have removed
9198 this basic block already. */
9199 gcc_assert (bb
|| changed
);
9201 changed
|= gimple_purge_dead_eh_edges (bb
);
9207 /* Purge dead abnormal call edges from basic block BB. */
9210 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
9212 bool changed
= false;
9215 gimple
*stmt
= *gsi_last_bb (bb
);
9217 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
9220 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
9222 if (e
->flags
& EDGE_ABNORMAL
)
9224 if (e
->flags
& EDGE_FALLTHRU
)
9225 e
->flags
&= ~EDGE_ABNORMAL
;
9227 remove_edge_and_dominated_blocks (e
);
9237 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
9240 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
9242 bool changed
= false;
9246 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
9248 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
9250 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
9251 this basic block already. */
9252 gcc_assert (bb
|| changed
);
9254 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
9260 /* This function is called whenever a new edge is created or
9264 gimple_execute_on_growing_pred (edge e
)
9266 basic_block bb
= e
->dest
;
9268 if (!gimple_seq_empty_p (phi_nodes (bb
)))
9269 reserve_phi_args_for_new_edge (bb
);
9272 /* This function is called immediately before edge E is removed from
9273 the edge vector E->dest->preds. */
9276 gimple_execute_on_shrinking_pred (edge e
)
9278 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
9279 remove_phi_args (e
);
9282 /*---------------------------------------------------------------------------
9283 Helper functions for Loop versioning
9284 ---------------------------------------------------------------------------*/
9286 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
9287 of 'first'. Both of them are dominated by 'new_head' basic block. When
9288 'new_head' was created by 'second's incoming edge it received phi arguments
9289 on the edge by split_edge(). Later, additional edge 'e' was created to
9290 connect 'new_head' and 'first'. Now this routine adds phi args on this
9291 additional edge 'e' that new_head to second edge received as part of edge
9295 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
9296 basic_block new_head
, edge e
)
9299 gphi_iterator psi1
, psi2
;
9301 edge e2
= find_edge (new_head
, second
);
9303 /* Because NEW_HEAD has been created by splitting SECOND's incoming
9304 edge, we should always have an edge from NEW_HEAD to SECOND. */
9305 gcc_assert (e2
!= NULL
);
9307 /* Browse all 'second' basic block phi nodes and add phi args to
9308 edge 'e' for 'first' head. PHI args are always in correct order. */
9310 for (psi2
= gsi_start_phis (second
),
9311 psi1
= gsi_start_phis (first
);
9312 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
9313 gsi_next (&psi2
), gsi_next (&psi1
))
9317 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
9318 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
9323 /* Adds a if else statement to COND_BB with condition COND_EXPR.
9324 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
9325 the destination of the ELSE part. */
9328 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
9329 basic_block second_head ATTRIBUTE_UNUSED
,
9330 basic_block cond_bb
, void *cond_e
)
9332 gimple_stmt_iterator gsi
;
9333 gimple
*new_cond_expr
;
9334 tree cond_expr
= (tree
) cond_e
;
9337 /* Build new conditional expr */
9338 gsi
= gsi_last_bb (cond_bb
);
9340 cond_expr
= force_gimple_operand_gsi_1 (&gsi
, cond_expr
,
9341 is_gimple_condexpr_for_cond
,
9343 GSI_CONTINUE_LINKING
);
9344 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
9345 NULL_TREE
, NULL_TREE
);
9347 /* Add new cond in cond_bb. */
9348 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
9350 /* Adjust edges appropriately to connect new head with first head
9351 as well as second head. */
9352 e0
= single_succ_edge (cond_bb
);
9353 e0
->flags
&= ~EDGE_FALLTHRU
;
9354 e0
->flags
|= EDGE_FALSE_VALUE
;
9358 /* Do book-keeping of basic block BB for the profile consistency checker.
9359 Store the counting in RECORD. */
9361 gimple_account_profile_record (basic_block bb
,
9362 struct profile_record
*record
)
9364 gimple_stmt_iterator i
;
9365 for (i
= gsi_start_nondebug_after_labels_bb (bb
); !gsi_end_p (i
);
9366 gsi_next_nondebug (&i
))
9369 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
9372 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
.ipa ().initialized_p ()
9373 && ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
.ipa ().nonzero_p ()
9374 && bb
->count
.ipa ().initialized_p ())
9376 += estimate_num_insns (gsi_stmt (i
),
9378 * bb
->count
.ipa ().to_gcov_type ();
9380 else if (bb
->count
.initialized_p ()
9381 && ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
.initialized_p ())
9383 += estimate_num_insns
9386 * bb
->count
.to_sreal_scale
9387 (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
).to_double ();
9390 += estimate_num_insns (gsi_stmt (i
), &eni_time_weights
);
9394 struct cfg_hooks gimple_cfg_hooks
= {
9396 gimple_verify_flow_info
,
9397 gimple_dump_bb
, /* dump_bb */
9398 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
9399 create_bb
, /* create_basic_block */
9400 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
9401 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
9402 gimple_can_remove_branch_p
, /* can_remove_branch_p */
9403 remove_bb
, /* delete_basic_block */
9404 gimple_split_block
, /* split_block */
9405 gimple_move_block_after
, /* move_block_after */
9406 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
9407 gimple_merge_blocks
, /* merge_blocks */
9408 gimple_predict_edge
, /* predict_edge */
9409 gimple_predicted_by_p
, /* predicted_by_p */
9410 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
9411 gimple_duplicate_bb
, /* duplicate_block */
9412 gimple_split_edge
, /* split_edge */
9413 gimple_make_forwarder_block
, /* make_forward_block */
9414 NULL
, /* tidy_fallthru_edge */
9415 NULL
, /* force_nonfallthru */
9416 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
9417 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
9418 gimple_flow_call_edges_add
, /* flow_call_edges_add */
9419 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
9420 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
9421 gimple_duplicate_loop_body_to_header_edge
, /* duplicate loop for trees */
9422 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
9423 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
9424 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
9425 flush_pending_stmts
, /* flush_pending_stmts */
9426 gimple_empty_block_p
, /* block_empty_p */
9427 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
9428 gimple_account_profile_record
,
9432 /* Split all critical edges. Split some extra (not necessarily critical) edges
9433 if FOR_EDGE_INSERTION_P is true. */
9436 split_critical_edges (bool for_edge_insertion_p
/* = false */)
9442 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9443 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9444 mappings around the calls to split_edge. */
9445 start_recording_case_labels ();
9446 FOR_ALL_BB_FN (bb
, cfun
)
9448 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
9450 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
9452 /* PRE inserts statements to edges and expects that
9453 since split_critical_edges was done beforehand, committing edge
9454 insertions will not split more edges. In addition to critical
9455 edges we must split edges that have multiple successors and
9456 end by control flow statements, such as RESX.
9457 Go ahead and split them too. This matches the logic in
9458 gimple_find_edge_insert_loc. */
9459 else if (for_edge_insertion_p
9460 && (!single_pred_p (e
->dest
)
9461 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
9462 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
9463 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
9464 && !(e
->flags
& EDGE_ABNORMAL
))
9466 gimple_stmt_iterator gsi
;
9468 gsi
= gsi_last_bb (e
->src
);
9469 if (!gsi_end_p (gsi
)
9470 && stmt_ends_bb_p (gsi_stmt (gsi
))
9471 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
9472 && !gimple_call_builtin_p (gsi_stmt (gsi
),
9478 end_recording_case_labels ();
9484 const pass_data pass_data_split_crit_edges
=
9486 GIMPLE_PASS
, /* type */
9487 "crited", /* name */
9488 OPTGROUP_NONE
, /* optinfo_flags */
9489 TV_TREE_SPLIT_EDGES
, /* tv_id */
9490 PROP_cfg
, /* properties_required */
9491 PROP_no_crit_edges
, /* properties_provided */
9492 0, /* properties_destroyed */
9493 0, /* todo_flags_start */
9494 0, /* todo_flags_finish */
9497 class pass_split_crit_edges
: public gimple_opt_pass
9500 pass_split_crit_edges (gcc::context
*ctxt
)
9501 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
9504 /* opt_pass methods: */
9505 unsigned int execute (function
*) final override
9507 return split_critical_edges ();
9510 opt_pass
* clone () final override
9512 return new pass_split_crit_edges (m_ctxt
);
9514 }; // class pass_split_crit_edges
9519 make_pass_split_crit_edges (gcc::context
*ctxt
)
9521 return new pass_split_crit_edges (ctxt
);
9525 /* Insert COND expression which is GIMPLE_COND after STMT
9526 in basic block BB with appropriate basic block split
9527 and creation of a new conditionally executed basic block.
9528 Update profile so the new bb is visited with probability PROB.
9529 Return created basic block. */
9531 insert_cond_bb (basic_block bb
, gimple
*stmt
, gimple
*cond
,
9532 profile_probability prob
)
9534 edge fall
= split_block (bb
, stmt
);
9535 gimple_stmt_iterator iter
= gsi_last_bb (bb
);
9538 /* Insert cond statement. */
9539 gcc_assert (gimple_code (cond
) == GIMPLE_COND
);
9540 if (gsi_end_p (iter
))
9541 gsi_insert_before (&iter
, cond
, GSI_CONTINUE_LINKING
);
9543 gsi_insert_after (&iter
, cond
, GSI_CONTINUE_LINKING
);
9545 /* Create conditionally executed block. */
9546 new_bb
= create_empty_bb (bb
);
9547 edge e
= make_edge (bb
, new_bb
, EDGE_TRUE_VALUE
);
9548 e
->probability
= prob
;
9549 new_bb
->count
= e
->count ();
9550 make_single_succ_edge (new_bb
, fall
->dest
, EDGE_FALLTHRU
);
9552 /* Fix edge for split bb. */
9553 fall
->flags
= EDGE_FALSE_VALUE
;
9554 fall
->probability
-= e
->probability
;
9556 /* Update dominance info. */
9557 if (dom_info_available_p (CDI_DOMINATORS
))
9559 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, bb
);
9560 set_immediate_dominator (CDI_DOMINATORS
, fall
->dest
, bb
);
9563 /* Update loop info. */
9565 add_bb_to_loop (new_bb
, bb
->loop_father
);
9572 /* Given a basic block B which ends with a conditional and has
9573 precisely two successors, determine which of the edges is taken if
9574 the conditional is true and which is taken if the conditional is
9575 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9578 extract_true_false_edges_from_block (basic_block b
,
9582 edge e
= EDGE_SUCC (b
, 0);
9584 if (e
->flags
& EDGE_TRUE_VALUE
)
9587 *false_edge
= EDGE_SUCC (b
, 1);
9592 *true_edge
= EDGE_SUCC (b
, 1);
9597 /* From a controlling predicate in the immediate dominator DOM of
9598 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9599 predicate evaluates to true and false and store them to
9600 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9601 they are non-NULL. Returns true if the edges can be determined,
9602 else return false. */
9605 extract_true_false_controlled_edges (basic_block dom
, basic_block phiblock
,
9606 edge
*true_controlled_edge
,
9607 edge
*false_controlled_edge
)
9609 basic_block bb
= phiblock
;
9610 edge true_edge
, false_edge
, tem
;
9611 edge e0
= NULL
, e1
= NULL
;
9613 /* We have to verify that one edge into the PHI node is dominated
9614 by the true edge of the predicate block and the other edge
9615 dominated by the false edge. This ensures that the PHI argument
9616 we are going to take is completely determined by the path we
9617 take from the predicate block.
9618 We can only use BB dominance checks below if the destination of
9619 the true/false edges are dominated by their edge, thus only
9620 have a single predecessor. */
9621 extract_true_false_edges_from_block (dom
, &true_edge
, &false_edge
);
9622 tem
= EDGE_PRED (bb
, 0);
9623 if (tem
== true_edge
9624 || (single_pred_p (true_edge
->dest
)
9625 && (tem
->src
== true_edge
->dest
9626 || dominated_by_p (CDI_DOMINATORS
,
9627 tem
->src
, true_edge
->dest
))))
9629 else if (tem
== false_edge
9630 || (single_pred_p (false_edge
->dest
)
9631 && (tem
->src
== false_edge
->dest
9632 || dominated_by_p (CDI_DOMINATORS
,
9633 tem
->src
, false_edge
->dest
))))
9637 tem
= EDGE_PRED (bb
, 1);
9638 if (tem
== true_edge
9639 || (single_pred_p (true_edge
->dest
)
9640 && (tem
->src
== true_edge
->dest
9641 || dominated_by_p (CDI_DOMINATORS
,
9642 tem
->src
, true_edge
->dest
))))
9644 else if (tem
== false_edge
9645 || (single_pred_p (false_edge
->dest
)
9646 && (tem
->src
== false_edge
->dest
9647 || dominated_by_p (CDI_DOMINATORS
,
9648 tem
->src
, false_edge
->dest
))))
9655 if (true_controlled_edge
)
9656 *true_controlled_edge
= e0
;
9657 if (false_controlled_edge
)
9658 *false_controlled_edge
= e1
;
9663 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9664 range [low, high]. Place associated stmts before *GSI. */
9667 generate_range_test (basic_block bb
, tree index
, tree low
, tree high
,
9668 tree
*lhs
, tree
*rhs
)
9670 tree type
= TREE_TYPE (index
);
9671 tree utype
= range_check_type (type
);
9673 low
= fold_convert (utype
, low
);
9674 high
= fold_convert (utype
, high
);
9676 gimple_seq seq
= NULL
;
9677 index
= gimple_convert (&seq
, utype
, index
);
9678 *lhs
= gimple_build (&seq
, MINUS_EXPR
, utype
, index
, low
);
9679 *rhs
= const_binop (MINUS_EXPR
, utype
, high
, low
);
9681 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
9682 gsi_insert_seq_before (&gsi
, seq
, GSI_SAME_STMT
);
9685 /* Return the basic block that belongs to label numbered INDEX
9686 of a switch statement. */
9689 gimple_switch_label_bb (function
*ifun
, gswitch
*gs
, unsigned index
)
9691 return label_to_block (ifun
, CASE_LABEL (gimple_switch_label (gs
, index
)));
9694 /* Return the default basic block of a switch statement. */
9697 gimple_switch_default_bb (function
*ifun
, gswitch
*gs
)
9699 return gimple_switch_label_bb (ifun
, gs
, 0);
9702 /* Return the edge that belongs to label numbered INDEX
9703 of a switch statement. */
9706 gimple_switch_edge (function
*ifun
, gswitch
*gs
, unsigned index
)
9708 return find_edge (gimple_bb (gs
), gimple_switch_label_bb (ifun
, gs
, index
));
9711 /* Return the default edge of a switch statement. */
9714 gimple_switch_default_edge (function
*ifun
, gswitch
*gs
)
9716 return gimple_switch_edge (ifun
, gs
, 0);
9719 /* Return true if the only executable statement in BB is a GIMPLE_COND. */
9722 cond_only_block_p (basic_block bb
)
9724 /* BB must have no executable statements. */
9725 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
9728 while (!gsi_end_p (gsi
))
9730 gimple
*stmt
= gsi_stmt (gsi
);
9731 if (is_gimple_debug (stmt
))
9733 else if (gimple_code (stmt
) == GIMPLE_NOP
9734 || gimple_code (stmt
) == GIMPLE_PREDICT
9735 || gimple_code (stmt
) == GIMPLE_COND
)
9745 /* Emit return warnings. */
9749 const pass_data pass_data_warn_function_return
=
9751 GIMPLE_PASS
, /* type */
9752 "*warn_function_return", /* name */
9753 OPTGROUP_NONE
, /* optinfo_flags */
9754 TV_NONE
, /* tv_id */
9755 PROP_cfg
, /* properties_required */
9756 0, /* properties_provided */
9757 0, /* properties_destroyed */
9758 0, /* todo_flags_start */
9759 0, /* todo_flags_finish */
9762 class pass_warn_function_return
: public gimple_opt_pass
9765 pass_warn_function_return (gcc::context
*ctxt
)
9766 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
9769 /* opt_pass methods: */
9770 unsigned int execute (function
*) final override
;
9772 }; // class pass_warn_function_return
9775 pass_warn_function_return::execute (function
*fun
)
9777 location_t location
;
9782 if (!targetm
.warn_func_return (fun
->decl
))
9785 /* If we have a path to EXIT, then we do return. */
9786 if (TREE_THIS_VOLATILE (fun
->decl
)
9787 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
9789 location
= UNKNOWN_LOCATION
;
9790 for (ei
= ei_start (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
);
9791 (e
= ei_safe_edge (ei
)); )
9793 last
= *gsi_last_bb (e
->src
);
9794 if ((gimple_code (last
) == GIMPLE_RETURN
9795 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
9796 && location
== UNKNOWN_LOCATION
9797 && ((location
= LOCATION_LOCUS (gimple_location (last
)))
9798 != UNKNOWN_LOCATION
)
9801 /* When optimizing, replace return stmts in noreturn functions
9802 with __builtin_unreachable () call. */
9803 if (optimize
&& gimple_code (last
) == GIMPLE_RETURN
)
9805 location_t loc
= gimple_location (last
);
9806 gimple
*new_stmt
= gimple_build_builtin_unreachable (loc
);
9807 gimple_stmt_iterator gsi
= gsi_for_stmt (last
);
9808 gsi_replace (&gsi
, new_stmt
, true);
9814 if (location
== UNKNOWN_LOCATION
)
9815 location
= cfun
->function_end_locus
;
9816 warning_at (location
, 0, "%<noreturn%> function does return");
9819 /* If we see "return;" in some basic block, then we do reach the end
9820 without returning a value. */
9821 else if (warn_return_type
> 0
9822 && !warning_suppressed_p (fun
->decl
, OPT_Wreturn_type
)
9823 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
9825 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
9827 greturn
*return_stmt
= dyn_cast
<greturn
*> (*gsi_last_bb (e
->src
));
9829 && gimple_return_retval (return_stmt
) == NULL
9830 && !warning_suppressed_p (return_stmt
, OPT_Wreturn_type
))
9832 location
= gimple_location (return_stmt
);
9833 if (LOCATION_LOCUS (location
) == UNKNOWN_LOCATION
)
9834 location
= fun
->function_end_locus
;
9835 if (warning_at (location
, OPT_Wreturn_type
,
9836 "control reaches end of non-void function"))
9837 suppress_warning (fun
->decl
, OPT_Wreturn_type
);
9841 /* The C++ FE turns fallthrough from the end of non-void function
9842 into __builtin_unreachable () call with BUILTINS_LOCATION.
9843 Recognize those as well as calls from ubsan_instrument_return. */
9845 if (!warning_suppressed_p (fun
->decl
, OPT_Wreturn_type
))
9846 FOR_EACH_BB_FN (bb
, fun
)
9847 if (EDGE_COUNT (bb
->succs
) == 0)
9849 gimple
*last
= *gsi_last_bb (bb
);
9850 const enum built_in_function ubsan_missing_ret
9851 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN
;
9853 && ((LOCATION_LOCUS (gimple_location (last
))
9854 == BUILTINS_LOCATION
9855 && (gimple_call_builtin_p (last
, BUILT_IN_UNREACHABLE
)
9856 || gimple_call_builtin_p (last
,
9857 BUILT_IN_UNREACHABLE_TRAP
)
9858 || gimple_call_builtin_p (last
, BUILT_IN_TRAP
)))
9859 || gimple_call_builtin_p (last
, ubsan_missing_ret
)))
9861 gimple_stmt_iterator gsi
= gsi_for_stmt (last
);
9862 gsi_prev_nondebug (&gsi
);
9863 gimple
*prev
= gsi_stmt (gsi
);
9865 location
= UNKNOWN_LOCATION
;
9867 location
= gimple_location (prev
);
9868 if (LOCATION_LOCUS (location
) == UNKNOWN_LOCATION
)
9869 location
= fun
->function_end_locus
;
9870 if (warning_at (location
, OPT_Wreturn_type
,
9871 "control reaches end of non-void function"))
9872 suppress_warning (fun
->decl
, OPT_Wreturn_type
);
9883 make_pass_warn_function_return (gcc::context
*ctxt
)
9885 return new pass_warn_function_return (ctxt
);
9888 /* Walk a gimplified function and warn for functions whose return value is
9889 ignored and attribute((warn_unused_result)) is set. This is done before
9890 inlining, so we don't have to worry about that. */
9893 do_warn_unused_result (gimple_seq seq
)
9896 gimple_stmt_iterator i
;
9898 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
9900 gimple
*g
= gsi_stmt (i
);
9902 switch (gimple_code (g
))
9905 do_warn_unused_result (gimple_bind_body (as_a
<gbind
*>(g
)));
9908 do_warn_unused_result (gimple_try_eval (g
));
9909 do_warn_unused_result (gimple_try_cleanup (g
));
9912 do_warn_unused_result (gimple_catch_handler (
9913 as_a
<gcatch
*> (g
)));
9915 case GIMPLE_EH_FILTER
:
9916 do_warn_unused_result (gimple_eh_filter_failure (g
));
9920 if (gimple_call_lhs (g
))
9922 if (gimple_call_internal_p (g
))
9925 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9926 LHS. All calls whose value is ignored should be
9927 represented like this. Look for the attribute. */
9928 fdecl
= gimple_call_fndecl (g
);
9929 ftype
= gimple_call_fntype (g
);
9931 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
9933 location_t loc
= gimple_location (g
);
9936 warning_at (loc
, OPT_Wunused_result
,
9937 "ignoring return value of %qD "
9938 "declared with attribute %<warn_unused_result%>",
9941 warning_at (loc
, OPT_Wunused_result
,
9942 "ignoring return value of function "
9943 "declared with attribute %<warn_unused_result%>");
9948 /* Not a container, not a call, or a call whose value is used. */
9956 const pass_data pass_data_warn_unused_result
=
9958 GIMPLE_PASS
, /* type */
9959 "*warn_unused_result", /* name */
9960 OPTGROUP_NONE
, /* optinfo_flags */
9961 TV_NONE
, /* tv_id */
9962 PROP_gimple_any
, /* properties_required */
9963 0, /* properties_provided */
9964 0, /* properties_destroyed */
9965 0, /* todo_flags_start */
9966 0, /* todo_flags_finish */
9969 class pass_warn_unused_result
: public gimple_opt_pass
9972 pass_warn_unused_result (gcc::context
*ctxt
)
9973 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
9976 /* opt_pass methods: */
9977 bool gate (function
*) final override
{ return flag_warn_unused_result
; }
9978 unsigned int execute (function
*) final override
9980 do_warn_unused_result (gimple_body (current_function_decl
));
9984 }; // class pass_warn_unused_result
9989 make_pass_warn_unused_result (gcc::context
*ctxt
)
9991 return new pass_warn_unused_result (ctxt
);
9994 /* Maybe Remove stores to variables we marked write-only.
9995 Return true if a store was removed. */
9997 maybe_remove_writeonly_store (gimple_stmt_iterator
&gsi
, gimple
*stmt
,
9998 bitmap dce_ssa_names
)
10000 /* Keep access when store has side effect, i.e. in case when source
10002 if (!gimple_store_p (stmt
)
10003 || gimple_has_side_effects (stmt
)
10007 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
10010 || (!TREE_STATIC (lhs
) && !DECL_EXTERNAL (lhs
))
10011 || !varpool_node::get (lhs
)->writeonly
)
10014 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
10016 fprintf (dump_file
, "Removing statement, writes"
10017 " to write only var:\n");
10018 print_gimple_stmt (dump_file
, stmt
, 0,
10019 TDF_VOPS
|TDF_MEMSYMS
);
10022 /* Mark ssa name defining to be checked for simple dce. */
10023 if (gimple_assign_single_p (stmt
))
10025 tree rhs
= gimple_assign_rhs1 (stmt
);
10026 if (TREE_CODE (rhs
) == SSA_NAME
10027 && !SSA_NAME_IS_DEFAULT_DEF (rhs
))
10028 bitmap_set_bit (dce_ssa_names
, SSA_NAME_VERSION (rhs
));
10030 unlink_stmt_vdef (stmt
);
10031 gsi_remove (&gsi
, true);
10032 release_defs (stmt
);
10036 /* IPA passes, compilation of earlier functions or inlining
10037 might have changed some properties, such as marked functions nothrow,
10038 pure, const or noreturn.
10039 Remove redundant edges and basic blocks, and create new ones if necessary. */
10042 execute_fixup_cfg (void)
10045 gimple_stmt_iterator gsi
;
10047 cgraph_node
*node
= cgraph_node::get (current_function_decl
);
10048 /* Same scaling is also done by ipa_merge_profiles. */
10049 profile_count num
= node
->count
;
10050 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
10051 bool scale
= num
.initialized_p () && !(num
== den
);
10052 auto_bitmap dce_ssa_names
;
10056 profile_count::adjust_for_ipa_scaling (&num
, &den
);
10057 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= node
->count
;
10058 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
10059 = EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
.apply_scale (num
, den
);
10062 FOR_EACH_BB_FN (bb
, cfun
)
10065 bb
->count
= bb
->count
.apply_scale (num
, den
);
10066 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
10068 gimple
*stmt
= gsi_stmt (gsi
);
10069 tree decl
= is_gimple_call (stmt
)
10070 ? gimple_call_fndecl (stmt
)
10074 int flags
= gimple_call_flags (stmt
);
10075 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
10077 if (gimple_in_ssa_p (cfun
))
10079 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
10080 update_stmt (stmt
);
10083 if (flags
& ECF_NORETURN
10084 && fixup_noreturn_call (stmt
))
10085 todo
|= TODO_cleanup_cfg
;
10088 /* Remove stores to variables we marked write-only. */
10089 if (maybe_remove_writeonly_store (gsi
, stmt
, dce_ssa_names
))
10091 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
10095 /* For calls we can simply remove LHS when it is known
10096 to be write-only. */
10097 if (is_gimple_call (stmt
)
10098 && gimple_get_lhs (stmt
))
10100 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
10103 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
10104 && varpool_node::get (lhs
)->writeonly
)
10106 gimple_call_set_lhs (stmt
, NULL
);
10107 update_stmt (stmt
);
10108 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
10114 if (gimple
*last
= *gsi_last_bb (bb
))
10116 if (maybe_clean_eh_stmt (last
)
10117 && gimple_purge_dead_eh_edges (bb
))
10118 todo
|= TODO_cleanup_cfg
;
10119 if (gimple_purge_dead_abnormal_call_edges (bb
))
10120 todo
|= TODO_cleanup_cfg
;
10123 /* If we have a basic block with no successors that does not
10124 end with a control statement or a noreturn call end it with
10125 a call to __builtin_unreachable. This situation can occur
10126 when inlining a noreturn call that does in fact return. */
10127 if (EDGE_COUNT (bb
->succs
) == 0)
10129 gimple
*stmt
= last_nondebug_stmt (bb
);
10131 || (!is_ctrl_stmt (stmt
)
10132 && (!is_gimple_call (stmt
)
10133 || !gimple_call_noreturn_p (stmt
))))
10135 if (stmt
&& is_gimple_call (stmt
))
10136 gimple_call_set_ctrl_altering (stmt
, false);
10137 stmt
= gimple_build_builtin_unreachable (UNKNOWN_LOCATION
);
10138 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
10139 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
10140 if (!cfun
->after_inlining
)
10141 if (tree fndecl
= gimple_call_fndecl (stmt
))
10143 gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
);
10144 node
->create_edge (cgraph_node::get_create (fndecl
),
10145 call_stmt
, bb
->count
);
10152 update_max_bb_count ();
10153 compute_function_frequency ();
10157 && (todo
& TODO_cleanup_cfg
))
10158 loops_state_set (LOOPS_NEED_FIXUP
);
10160 simple_dce_from_worklist (dce_ssa_names
);
10167 const pass_data pass_data_fixup_cfg
=
10169 GIMPLE_PASS
, /* type */
10170 "fixup_cfg", /* name */
10171 OPTGROUP_NONE
, /* optinfo_flags */
10172 TV_NONE
, /* tv_id */
10173 PROP_cfg
, /* properties_required */
10174 0, /* properties_provided */
10175 0, /* properties_destroyed */
10176 0, /* todo_flags_start */
10177 0, /* todo_flags_finish */
10180 class pass_fixup_cfg
: public gimple_opt_pass
10183 pass_fixup_cfg (gcc::context
*ctxt
)
10184 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
10187 /* opt_pass methods: */
10188 opt_pass
* clone () final override
{ return new pass_fixup_cfg (m_ctxt
); }
10189 unsigned int execute (function
*) final override
10191 return execute_fixup_cfg ();
10194 }; // class pass_fixup_cfg
10196 } // anon namespace
10199 make_pass_fixup_cfg (gcc::context
*ctxt
)
10201 return new pass_fixup_cfg (ctxt
);
10204 /* Garbage collection support for edge_def. */
10206 extern void gt_ggc_mx (tree
&);
10207 extern void gt_ggc_mx (gimple
*&);
10208 extern void gt_ggc_mx (rtx
&);
10209 extern void gt_ggc_mx (basic_block
&);
10212 gt_ggc_mx (rtx_insn
*& x
)
10215 gt_ggc_mx_rtx_def ((void *) x
);
10219 gt_ggc_mx (edge_def
*e
)
10221 tree block
= LOCATION_BLOCK (e
->goto_locus
);
10222 gt_ggc_mx (e
->src
);
10223 gt_ggc_mx (e
->dest
);
10224 if (current_ir_type () == IR_GIMPLE
)
10225 gt_ggc_mx (e
->insns
.g
);
10227 gt_ggc_mx (e
->insns
.r
);
10231 /* PCH support for edge_def. */
10233 extern void gt_pch_nx (tree
&);
10234 extern void gt_pch_nx (gimple
*&);
10235 extern void gt_pch_nx (rtx
&);
10236 extern void gt_pch_nx (basic_block
&);
10239 gt_pch_nx (rtx_insn
*& x
)
10242 gt_pch_nx_rtx_def ((void *) x
);
10246 gt_pch_nx (edge_def
*e
)
10248 tree block
= LOCATION_BLOCK (e
->goto_locus
);
10249 gt_pch_nx (e
->src
);
10250 gt_pch_nx (e
->dest
);
10251 if (current_ir_type () == IR_GIMPLE
)
10252 gt_pch_nx (e
->insns
.g
);
10254 gt_pch_nx (e
->insns
.r
);
10259 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
10261 tree block
= LOCATION_BLOCK (e
->goto_locus
);
10262 op (&(e
->src
), NULL
, cookie
);
10263 op (&(e
->dest
), NULL
, cookie
);
10264 if (current_ir_type () == IR_GIMPLE
)
10265 op (&(e
->insns
.g
), NULL
, cookie
);
10267 op (&(e
->insns
.r
), NULL
, cookie
);
10268 op (&(block
), &(block
), cookie
);
10273 namespace selftest
{
10275 /* Helper function for CFG selftests: create a dummy function decl
10276 and push it as cfun. */
10279 push_fndecl (const char *name
)
10281 tree fn_type
= build_function_type_array (integer_type_node
, 0, NULL
);
10282 /* FIXME: this uses input_location: */
10283 tree fndecl
= build_fn_decl (name
, fn_type
);
10284 tree retval
= build_decl (UNKNOWN_LOCATION
, RESULT_DECL
,
10285 NULL_TREE
, integer_type_node
);
10286 DECL_RESULT (fndecl
) = retval
;
10287 push_struct_function (fndecl
);
10288 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
10289 ASSERT_TRUE (fun
!= NULL
);
10290 init_empty_tree_cfg_for_function (fun
);
10291 ASSERT_EQ (2, n_basic_blocks_for_fn (fun
));
10292 ASSERT_EQ (0, n_edges_for_fn (fun
));
10296 /* These tests directly create CFGs.
10297 Compare with the static fns within tree-cfg.cc:
10299 - make_blocks: calls create_basic_block (seq, bb);
10302 /* Verify a simple cfg of the form:
10303 ENTRY -> A -> B -> C -> EXIT. */
10306 test_linear_chain ()
10308 gimple_register_cfg_hooks ();
10310 tree fndecl
= push_fndecl ("cfg_test_linear_chain");
10311 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
10313 /* Create some empty blocks. */
10314 basic_block bb_a
= create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
10315 basic_block bb_b
= create_empty_bb (bb_a
);
10316 basic_block bb_c
= create_empty_bb (bb_b
);
10318 ASSERT_EQ (5, n_basic_blocks_for_fn (fun
));
10319 ASSERT_EQ (0, n_edges_for_fn (fun
));
10321 /* Create some edges: a simple linear chain of BBs. */
10322 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), bb_a
, EDGE_FALLTHRU
);
10323 make_edge (bb_a
, bb_b
, 0);
10324 make_edge (bb_b
, bb_c
, 0);
10325 make_edge (bb_c
, EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
10327 /* Verify the edges. */
10328 ASSERT_EQ (4, n_edges_for_fn (fun
));
10329 ASSERT_EQ (NULL
, ENTRY_BLOCK_PTR_FOR_FN (fun
)->preds
);
10330 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun
)->succs
->length ());
10331 ASSERT_EQ (1, bb_a
->preds
->length ());
10332 ASSERT_EQ (1, bb_a
->succs
->length ());
10333 ASSERT_EQ (1, bb_b
->preds
->length ());
10334 ASSERT_EQ (1, bb_b
->succs
->length ());
10335 ASSERT_EQ (1, bb_c
->preds
->length ());
10336 ASSERT_EQ (1, bb_c
->succs
->length ());
10337 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
->length ());
10338 ASSERT_EQ (NULL
, EXIT_BLOCK_PTR_FOR_FN (fun
)->succs
);
10340 /* Verify the dominance information
10341 Each BB in our simple chain should be dominated by the one before
10343 calculate_dominance_info (CDI_DOMINATORS
);
10344 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_b
));
10345 ASSERT_EQ (bb_b
, get_immediate_dominator (CDI_DOMINATORS
, bb_c
));
10346 auto_vec
<basic_block
> dom_by_b
= get_dominated_by (CDI_DOMINATORS
, bb_b
);
10347 ASSERT_EQ (1, dom_by_b
.length ());
10348 ASSERT_EQ (bb_c
, dom_by_b
[0]);
10349 free_dominance_info (CDI_DOMINATORS
);
10351 /* Similarly for post-dominance: each BB in our chain is post-dominated
10352 by the one after it. */
10353 calculate_dominance_info (CDI_POST_DOMINATORS
);
10354 ASSERT_EQ (bb_b
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_a
));
10355 ASSERT_EQ (bb_c
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_b
));
10356 auto_vec
<basic_block
> postdom_by_b
= get_dominated_by (CDI_POST_DOMINATORS
, bb_b
);
10357 ASSERT_EQ (1, postdom_by_b
.length ());
10358 ASSERT_EQ (bb_a
, postdom_by_b
[0]);
10359 free_dominance_info (CDI_POST_DOMINATORS
);
10364 /* Verify a simple CFG of the form:
10380 gimple_register_cfg_hooks ();
10382 tree fndecl
= push_fndecl ("cfg_test_diamond");
10383 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
10385 /* Create some empty blocks. */
10386 basic_block bb_a
= create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
10387 basic_block bb_b
= create_empty_bb (bb_a
);
10388 basic_block bb_c
= create_empty_bb (bb_a
);
10389 basic_block bb_d
= create_empty_bb (bb_b
);
10391 ASSERT_EQ (6, n_basic_blocks_for_fn (fun
));
10392 ASSERT_EQ (0, n_edges_for_fn (fun
));
10394 /* Create the edges. */
10395 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), bb_a
, EDGE_FALLTHRU
);
10396 make_edge (bb_a
, bb_b
, EDGE_TRUE_VALUE
);
10397 make_edge (bb_a
, bb_c
, EDGE_FALSE_VALUE
);
10398 make_edge (bb_b
, bb_d
, 0);
10399 make_edge (bb_c
, bb_d
, 0);
10400 make_edge (bb_d
, EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
10402 /* Verify the edges. */
10403 ASSERT_EQ (6, n_edges_for_fn (fun
));
10404 ASSERT_EQ (1, bb_a
->preds
->length ());
10405 ASSERT_EQ (2, bb_a
->succs
->length ());
10406 ASSERT_EQ (1, bb_b
->preds
->length ());
10407 ASSERT_EQ (1, bb_b
->succs
->length ());
10408 ASSERT_EQ (1, bb_c
->preds
->length ());
10409 ASSERT_EQ (1, bb_c
->succs
->length ());
10410 ASSERT_EQ (2, bb_d
->preds
->length ());
10411 ASSERT_EQ (1, bb_d
->succs
->length ());
10413 /* Verify the dominance information. */
10414 calculate_dominance_info (CDI_DOMINATORS
);
10415 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_b
));
10416 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_c
));
10417 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_d
));
10418 auto_vec
<basic_block
> dom_by_a
= get_dominated_by (CDI_DOMINATORS
, bb_a
);
10419 ASSERT_EQ (3, dom_by_a
.length ()); /* B, C, D, in some order. */
10420 dom_by_a
.release ();
10421 auto_vec
<basic_block
> dom_by_b
= get_dominated_by (CDI_DOMINATORS
, bb_b
);
10422 ASSERT_EQ (0, dom_by_b
.length ());
10423 dom_by_b
.release ();
10424 free_dominance_info (CDI_DOMINATORS
);
10426 /* Similarly for post-dominance. */
10427 calculate_dominance_info (CDI_POST_DOMINATORS
);
10428 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_a
));
10429 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_b
));
10430 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_c
));
10431 auto_vec
<basic_block
> postdom_by_d
= get_dominated_by (CDI_POST_DOMINATORS
, bb_d
);
10432 ASSERT_EQ (3, postdom_by_d
.length ()); /* A, B, C in some order. */
10433 postdom_by_d
.release ();
10434 auto_vec
<basic_block
> postdom_by_b
= get_dominated_by (CDI_POST_DOMINATORS
, bb_b
);
10435 ASSERT_EQ (0, postdom_by_b
.length ());
10436 postdom_by_b
.release ();
10437 free_dominance_info (CDI_POST_DOMINATORS
);
10442 /* Verify that we can handle a CFG containing a "complete" aka
10443 fully-connected subgraph (where A B C D below all have edges
10444 pointing to each other node, also to themselves).
10462 test_fully_connected ()
10464 gimple_register_cfg_hooks ();
10466 tree fndecl
= push_fndecl ("cfg_fully_connected");
10467 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
10471 /* Create some empty blocks. */
10472 auto_vec
<basic_block
> subgraph_nodes
;
10473 for (int i
= 0; i
< n
; i
++)
10474 subgraph_nodes
.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
)));
10476 ASSERT_EQ (n
+ 2, n_basic_blocks_for_fn (fun
));
10477 ASSERT_EQ (0, n_edges_for_fn (fun
));
10479 /* Create the edges. */
10480 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), subgraph_nodes
[0], EDGE_FALLTHRU
);
10481 make_edge (subgraph_nodes
[0], EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
10482 for (int i
= 0; i
< n
; i
++)
10483 for (int j
= 0; j
< n
; j
++)
10484 make_edge (subgraph_nodes
[i
], subgraph_nodes
[j
], 0);
10486 /* Verify the edges. */
10487 ASSERT_EQ (2 + (n
* n
), n_edges_for_fn (fun
));
10488 /* The first one is linked to ENTRY/EXIT as well as itself and
10489 everything else. */
10490 ASSERT_EQ (n
+ 1, subgraph_nodes
[0]->preds
->length ());
10491 ASSERT_EQ (n
+ 1, subgraph_nodes
[0]->succs
->length ());
10492 /* The other ones in the subgraph are linked to everything in
10493 the subgraph (including themselves). */
10494 for (int i
= 1; i
< n
; i
++)
10496 ASSERT_EQ (n
, subgraph_nodes
[i
]->preds
->length ());
10497 ASSERT_EQ (n
, subgraph_nodes
[i
]->succs
->length ());
10500 /* Verify the dominance information. */
10501 calculate_dominance_info (CDI_DOMINATORS
);
10502 /* The initial block in the subgraph should be dominated by ENTRY. */
10503 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun
),
10504 get_immediate_dominator (CDI_DOMINATORS
,
10505 subgraph_nodes
[0]));
10506 /* Every other block in the subgraph should be dominated by the
10508 for (int i
= 1; i
< n
; i
++)
10509 ASSERT_EQ (subgraph_nodes
[0],
10510 get_immediate_dominator (CDI_DOMINATORS
,
10511 subgraph_nodes
[i
]));
10512 free_dominance_info (CDI_DOMINATORS
);
10514 /* Similarly for post-dominance. */
10515 calculate_dominance_info (CDI_POST_DOMINATORS
);
10516 /* The initial block in the subgraph should be postdominated by EXIT. */
10517 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun
),
10518 get_immediate_dominator (CDI_POST_DOMINATORS
,
10519 subgraph_nodes
[0]));
10520 /* Every other block in the subgraph should be postdominated by the
10521 initial block, since that leads to EXIT. */
10522 for (int i
= 1; i
< n
; i
++)
10523 ASSERT_EQ (subgraph_nodes
[0],
10524 get_immediate_dominator (CDI_POST_DOMINATORS
,
10525 subgraph_nodes
[i
]));
10526 free_dominance_info (CDI_POST_DOMINATORS
);
10531 /* Run all of the selftests within this file. */
10534 tree_cfg_cc_tests ()
10536 test_linear_chain ();
10538 test_fully_connected ();
10541 } // namespace selftest
10543 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10546 - switch statement (a block with many out-edges)
10547 - something that jumps to itself
10550 #endif /* CHECKING_P */