1 /* Control flow functions for trees.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "fold-const.h"
29 #include "trans-mem.h"
30 #include "stor-layout.h"
31 #include "print-tree.h"
34 #include "hard-reg-set.h"
36 #include "dominance.h"
39 #include "basic-block.h"
41 #include "gimple-pretty-print.h"
42 #include "tree-ssa-alias.h"
43 #include "internal-fn.h"
44 #include "gimple-fold.h"
46 #include "gimple-expr.h"
48 #include "gimple-iterator.h"
49 #include "gimplify-me.h"
50 #include "gimple-walk.h"
51 #include "gimple-ssa.h"
52 #include "plugin-api.h"
56 #include "tree-phinodes.h"
57 #include "ssa-iterators.h"
58 #include "stringpool.h"
59 #include "tree-ssanames.h"
60 #include "tree-ssa-loop-manip.h"
61 #include "tree-ssa-loop-niter.h"
62 #include "tree-into-ssa.h"
64 #include "insn-config.h"
75 #include "tree-dump.h"
76 #include "tree-pass.h"
77 #include "diagnostic-core.h"
80 #include "tree-ssa-propagate.h"
81 #include "value-prof.h"
82 #include "tree-inline.h"
84 #include "tree-ssa-live.h"
86 #include "tree-cfgcleanup.h"
87 #include "wide-int-print.h"
89 /* This file contains functions for building the Control Flow Graph (CFG)
90 for a function tree. */
92 /* Local declarations. */
94 /* Initial capacity for the basic block array. */
95 static const int initial_cfg_capacity
= 20;
97 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
98 which use a particular edge. The CASE_LABEL_EXPRs are chained together
99 via their CASE_CHAIN field, which we clear after we're done with the
100 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
102 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
103 update the case vector in response to edge redirections.
105 Right now this table is set up and torn down at key points in the
106 compilation process. It would be nice if we could make the table
107 more persistent. The key is getting notification of changes to
108 the CFG (particularly edge removal, creation and redirection). */
110 static hash_map
<edge
, tree
> *edge_to_cases
;
112 /* If we record edge_to_cases, this bitmap will hold indexes
113 of basic blocks that end in a GIMPLE_SWITCH which we touched
114 due to edge manipulations. */
116 static bitmap touched_switch_bbs
;
118 /* CFG statistics. */
121 long num_merged_labels
;
124 static struct cfg_stats_d cfg_stats
;
126 /* Hash table to store last discriminator assigned for each locus. */
127 struct locus_discrim_map
133 /* Hashtable helpers. */
135 struct locus_discrim_hasher
: free_ptr_hash
<locus_discrim_map
>
137 static inline hashval_t
hash (const locus_discrim_map
*);
138 static inline bool equal (const locus_discrim_map
*,
139 const locus_discrim_map
*);
142 /* Trivial hash function for a location_t. ITEM is a pointer to
143 a hash table entry that maps a location_t to a discriminator. */
146 locus_discrim_hasher::hash (const locus_discrim_map
*item
)
148 return LOCATION_LINE (item
->locus
);
151 /* Equality function for the locus-to-discriminator map. A and B
152 point to the two hash table entries to compare. */
155 locus_discrim_hasher::equal (const locus_discrim_map
*a
,
156 const locus_discrim_map
*b
)
158 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
161 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
163 /* Basic blocks and flowgraphs. */
164 static void make_blocks (gimple_seq
);
167 static void make_edges (void);
168 static void assign_discriminators (void);
169 static void make_cond_expr_edges (basic_block
);
170 static void make_gimple_switch_edges (gswitch
*, basic_block
);
171 static bool make_goto_expr_edges (basic_block
);
172 static void make_gimple_asm_edges (basic_block
);
173 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
174 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
176 /* Various helpers. */
177 static inline bool stmt_starts_bb_p (gimple
, gimple
);
178 static int gimple_verify_flow_info (void);
179 static void gimple_make_forwarder_block (edge
);
180 static gimple
first_non_label_stmt (basic_block
);
181 static bool verify_gimple_transaction (gtransaction
*);
182 static bool call_can_make_abnormal_goto (gimple
);
184 /* Flowgraph optimization and cleanup. */
185 static void gimple_merge_blocks (basic_block
, basic_block
);
186 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
187 static void remove_bb (basic_block
);
188 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
189 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
190 static edge
find_taken_edge_switch_expr (gswitch
*, basic_block
, tree
);
191 static tree
find_case_label_for_value (gswitch
*, tree
);
194 init_empty_tree_cfg_for_function (struct function
*fn
)
196 /* Initialize the basic block array. */
198 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
199 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
200 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
201 vec_alloc (basic_block_info_for_fn (fn
), initial_cfg_capacity
);
202 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
203 initial_cfg_capacity
);
205 /* Build a mapping of labels to their associated blocks. */
206 vec_alloc (label_to_block_map_for_fn (fn
), initial_cfg_capacity
);
207 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
208 initial_cfg_capacity
);
210 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
211 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
213 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
214 = EXIT_BLOCK_PTR_FOR_FN (fn
);
215 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
216 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
220 init_empty_tree_cfg (void)
222 init_empty_tree_cfg_for_function (cfun
);
225 /*---------------------------------------------------------------------------
227 ---------------------------------------------------------------------------*/
229 /* Entry point to the CFG builder for trees. SEQ is the sequence of
230 statements to be added to the flowgraph. */
233 build_gimple_cfg (gimple_seq seq
)
235 /* Register specific gimple functions. */
236 gimple_register_cfg_hooks ();
238 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
240 init_empty_tree_cfg ();
244 /* Make sure there is always at least one block, even if it's empty. */
245 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
246 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
248 /* Adjust the size of the array. */
249 if (basic_block_info_for_fn (cfun
)->length ()
250 < (size_t) n_basic_blocks_for_fn (cfun
))
251 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
252 n_basic_blocks_for_fn (cfun
));
254 /* To speed up statement iterator walks, we first purge dead labels. */
255 cleanup_dead_labels ();
257 /* Group case nodes to reduce the number of edges.
258 We do this after cleaning up dead labels because otherwise we miss
259 a lot of obvious case merging opportunities. */
260 group_case_labels ();
262 /* Create the edges of the flowgraph. */
263 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
265 assign_discriminators ();
266 cleanup_dead_labels ();
267 delete discriminator_per_locus
;
268 discriminator_per_locus
= NULL
;
271 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
272 them and propagate the information to LOOP. We assume that the annotations
273 come immediately before the condition in BB, if any. */
276 replace_loop_annotate_in_block (basic_block bb
, struct loop
*loop
)
278 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
279 gimple stmt
= gsi_stmt (gsi
);
281 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
284 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
286 stmt
= gsi_stmt (gsi
);
287 if (gimple_code (stmt
) != GIMPLE_CALL
)
289 if (!gimple_call_internal_p (stmt
)
290 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
293 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
295 case annot_expr_ivdep_kind
:
296 loop
->safelen
= INT_MAX
;
298 case annot_expr_no_vector_kind
:
299 loop
->dont_vectorize
= true;
301 case annot_expr_vector_kind
:
302 loop
->force_vectorize
= true;
303 cfun
->has_force_vectorize_loops
= true;
309 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
310 gimple_call_arg (stmt
, 0));
311 gsi_replace (&gsi
, stmt
, true);
315 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
316 them and propagate the information to the loop. We assume that the
317 annotations come immediately before the condition of the loop. */
320 replace_loop_annotate (void)
324 gimple_stmt_iterator gsi
;
327 FOR_EACH_LOOP (loop
, 0)
329 /* First look into the header. */
330 replace_loop_annotate_in_block (loop
->header
, loop
);
332 /* Then look into the latch, if any. */
334 replace_loop_annotate_in_block (loop
->latch
, loop
);
337 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
338 FOR_EACH_BB_FN (bb
, cfun
)
340 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
342 stmt
= gsi_stmt (gsi
);
343 if (gimple_code (stmt
) != GIMPLE_CALL
)
345 if (!gimple_call_internal_p (stmt
)
346 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
349 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
351 case annot_expr_ivdep_kind
:
352 case annot_expr_no_vector_kind
:
353 case annot_expr_vector_kind
:
359 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
360 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
361 gimple_call_arg (stmt
, 0));
362 gsi_replace (&gsi
, stmt
, true);
369 execute_build_cfg (void)
371 gimple_seq body
= gimple_body (current_function_decl
);
373 build_gimple_cfg (body
);
374 gimple_set_body (current_function_decl
, NULL
);
375 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
377 fprintf (dump_file
, "Scope blocks:\n");
378 dump_scope_blocks (dump_file
, dump_flags
);
381 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
382 replace_loop_annotate ();
388 const pass_data pass_data_build_cfg
=
390 GIMPLE_PASS
, /* type */
392 OPTGROUP_NONE
, /* optinfo_flags */
393 TV_TREE_CFG
, /* tv_id */
394 PROP_gimple_leh
, /* properties_required */
395 ( PROP_cfg
| PROP_loops
), /* properties_provided */
396 0, /* properties_destroyed */
397 0, /* todo_flags_start */
398 0, /* todo_flags_finish */
401 class pass_build_cfg
: public gimple_opt_pass
404 pass_build_cfg (gcc::context
*ctxt
)
405 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
408 /* opt_pass methods: */
409 virtual unsigned int execute (function
*) { return execute_build_cfg (); }
411 }; // class pass_build_cfg
416 make_pass_build_cfg (gcc::context
*ctxt
)
418 return new pass_build_cfg (ctxt
);
422 /* Return true if T is a computed goto. */
425 computed_goto_p (gimple t
)
427 return (gimple_code (t
) == GIMPLE_GOTO
428 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
431 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
432 the other edge points to a bb with just __builtin_unreachable ().
433 I.e. return true for C->M edge in:
441 __builtin_unreachable ();
445 assert_unreachable_fallthru_edge_p (edge e
)
447 basic_block pred_bb
= e
->src
;
448 gimple last
= last_stmt (pred_bb
);
449 if (last
&& gimple_code (last
) == GIMPLE_COND
)
451 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
452 if (other_bb
== e
->dest
)
453 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
454 if (EDGE_COUNT (other_bb
->succs
) == 0)
456 gimple_stmt_iterator gsi
= gsi_after_labels (other_bb
);
461 stmt
= gsi_stmt (gsi
);
462 while (is_gimple_debug (stmt
) || gimple_clobber_p (stmt
))
467 stmt
= gsi_stmt (gsi
);
469 return gimple_call_builtin_p (stmt
, BUILT_IN_UNREACHABLE
);
476 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
477 could alter control flow except via eh. We initialize the flag at
478 CFG build time and only ever clear it later. */
481 gimple_call_initialize_ctrl_altering (gimple stmt
)
483 int flags
= gimple_call_flags (stmt
);
485 /* A call alters control flow if it can make an abnormal goto. */
486 if (call_can_make_abnormal_goto (stmt
)
487 /* A call also alters control flow if it does not return. */
488 || flags
& ECF_NORETURN
489 /* TM ending statements have backedges out of the transaction.
490 Return true so we split the basic block containing them.
491 Note that the TM_BUILTIN test is merely an optimization. */
492 || ((flags
& ECF_TM_BUILTIN
)
493 && is_tm_ending_fndecl (gimple_call_fndecl (stmt
)))
494 /* BUILT_IN_RETURN call is same as return statement. */
495 || gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
496 gimple_call_set_ctrl_altering (stmt
, true);
498 gimple_call_set_ctrl_altering (stmt
, false);
502 /* Insert SEQ after BB and build a flowgraph. */
505 make_blocks_1 (gimple_seq seq
, basic_block bb
)
507 gimple_stmt_iterator i
= gsi_start (seq
);
509 bool start_new_block
= true;
510 bool first_stmt_of_seq
= true;
512 while (!gsi_end_p (i
))
519 if (stmt
&& is_gimple_call (stmt
))
520 gimple_call_initialize_ctrl_altering (stmt
);
522 /* If the statement starts a new basic block or if we have determined
523 in a previous pass that we need to create a new block for STMT, do
525 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
527 if (!first_stmt_of_seq
)
528 gsi_split_seq_before (&i
, &seq
);
529 bb
= create_basic_block (seq
, bb
);
530 start_new_block
= false;
533 /* Now add STMT to BB and create the subgraphs for special statement
535 gimple_set_bb (stmt
, bb
);
537 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
539 if (stmt_ends_bb_p (stmt
))
541 /* If the stmt can make abnormal goto use a new temporary
542 for the assignment to the LHS. This makes sure the old value
543 of the LHS is available on the abnormal edge. Otherwise
544 we will end up with overlapping life-ranges for abnormal
546 if (gimple_has_lhs (stmt
)
547 && stmt_can_make_abnormal_goto (stmt
)
548 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
550 tree lhs
= gimple_get_lhs (stmt
);
551 tree tmp
= create_tmp_var (TREE_TYPE (lhs
));
552 gimple s
= gimple_build_assign (lhs
, tmp
);
553 gimple_set_location (s
, gimple_location (stmt
));
554 gimple_set_block (s
, gimple_block (stmt
));
555 gimple_set_lhs (stmt
, tmp
);
556 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
557 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
558 DECL_GIMPLE_REG_P (tmp
) = 1;
559 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
561 start_new_block
= true;
565 first_stmt_of_seq
= false;
570 /* Build a flowgraph for the sequence of stmts SEQ. */
573 make_blocks (gimple_seq seq
)
575 make_blocks_1 (seq
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
578 /* Create and return a new empty basic block after bb AFTER. */
581 create_bb (void *h
, void *e
, basic_block after
)
587 /* Create and initialize a new basic block. Since alloc_block uses
588 GC allocation that clears memory to allocate a basic block, we do
589 not have to clear the newly allocated basic block here. */
592 bb
->index
= last_basic_block_for_fn (cfun
);
594 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
596 /* Add the new block to the linked list of blocks. */
597 link_block (bb
, after
);
599 /* Grow the basic block array if needed. */
600 if ((size_t) last_basic_block_for_fn (cfun
)
601 == basic_block_info_for_fn (cfun
)->length ())
604 (last_basic_block_for_fn (cfun
)
605 + (last_basic_block_for_fn (cfun
) + 3) / 4);
606 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
609 /* Add the newly created block to the array. */
610 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
612 n_basic_blocks_for_fn (cfun
)++;
613 last_basic_block_for_fn (cfun
)++;
619 /*---------------------------------------------------------------------------
621 ---------------------------------------------------------------------------*/
623 /* Fold COND_EXPR_COND of each COND_EXPR. */
626 fold_cond_expr_cond (void)
630 FOR_EACH_BB_FN (bb
, cfun
)
632 gimple stmt
= last_stmt (bb
);
634 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
636 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
637 location_t loc
= gimple_location (stmt
);
641 fold_defer_overflow_warnings ();
642 cond
= fold_binary_loc (loc
, gimple_cond_code (cond_stmt
),
644 gimple_cond_lhs (cond_stmt
),
645 gimple_cond_rhs (cond_stmt
));
648 zerop
= integer_zerop (cond
);
649 onep
= integer_onep (cond
);
652 zerop
= onep
= false;
654 fold_undefer_overflow_warnings (zerop
|| onep
,
656 WARN_STRICT_OVERFLOW_CONDITIONAL
);
658 gimple_cond_make_false (cond_stmt
);
660 gimple_cond_make_true (cond_stmt
);
665 /* If basic block BB has an abnormal edge to a basic block
666 containing IFN_ABNORMAL_DISPATCHER internal call, return
667 that the dispatcher's basic block, otherwise return NULL. */
670 get_abnormal_succ_dispatcher (basic_block bb
)
675 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
676 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
678 gimple_stmt_iterator gsi
679 = gsi_start_nondebug_after_labels_bb (e
->dest
);
680 gimple g
= gsi_stmt (gsi
);
682 && is_gimple_call (g
)
683 && gimple_call_internal_p (g
)
684 && gimple_call_internal_fn (g
) == IFN_ABNORMAL_DISPATCHER
)
690 /* Helper function for make_edges. Create a basic block with
691 with ABNORMAL_DISPATCHER internal call in it if needed, and
692 create abnormal edges from BBS to it and from it to FOR_BB
693 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
696 handle_abnormal_edges (basic_block
*dispatcher_bbs
,
697 basic_block for_bb
, int *bb_to_omp_idx
,
698 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
700 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
701 unsigned int idx
= 0;
707 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
708 if (bb_to_omp_idx
[for_bb
->index
] != 0)
712 /* If the dispatcher has been created already, then there are basic
713 blocks with abnormal edges to it, so just make a new edge to
715 if (*dispatcher
== NULL
)
717 /* Check if there are any basic blocks that need to have
718 abnormal edges to this dispatcher. If there are none, return
720 if (bb_to_omp_idx
== NULL
)
722 if (bbs
->is_empty ())
727 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
728 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
734 /* Create the dispatcher bb. */
735 *dispatcher
= create_basic_block (NULL
, for_bb
);
738 /* Factor computed gotos into a common computed goto site. Also
739 record the location of that site so that we can un-factor the
740 gotos after we have converted back to normal form. */
741 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
743 /* Create the destination of the factored goto. Each original
744 computed goto will put its desired destination into this
745 variable and jump to the label we create immediately below. */
746 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
748 /* Build a label for the new block which will contain the
749 factored computed goto. */
750 tree factored_label_decl
751 = create_artificial_label (UNKNOWN_LOCATION
);
752 gimple factored_computed_goto_label
753 = gimple_build_label (factored_label_decl
);
754 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
756 /* Build our new computed goto. */
757 gimple factored_computed_goto
= gimple_build_goto (var
);
758 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
760 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
763 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
766 gsi
= gsi_last_bb (bb
);
767 gimple last
= gsi_stmt (gsi
);
769 gcc_assert (computed_goto_p (last
));
771 /* Copy the original computed goto's destination into VAR. */
773 = gimple_build_assign (var
, gimple_goto_dest (last
));
774 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
776 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
777 e
->goto_locus
= gimple_location (last
);
778 gsi_remove (&gsi
, true);
783 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
784 gimple g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
786 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
787 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
789 /* Create predecessor edges of the dispatcher. */
790 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
793 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
795 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
800 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
803 /* Creates outgoing edges for BB. Returns 1 when it ends with an
804 computed goto, returns 2 when it ends with a statement that
805 might return to this function via an nonlocal goto, otherwise
806 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
809 make_edges_bb (basic_block bb
, struct omp_region
**pcur_region
, int *pomp_index
)
811 gimple last
= last_stmt (bb
);
812 bool fallthru
= false;
818 switch (gimple_code (last
))
821 if (make_goto_expr_edges (bb
))
827 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
828 e
->goto_locus
= gimple_location (last
);
833 make_cond_expr_edges (bb
);
837 make_gimple_switch_edges (as_a
<gswitch
*> (last
), bb
);
841 make_eh_edges (last
);
844 case GIMPLE_EH_DISPATCH
:
845 fallthru
= make_eh_dispatch_edges (as_a
<geh_dispatch
*> (last
));
849 /* If this function receives a nonlocal goto, then we need to
850 make edges from this call site to all the nonlocal goto
852 if (stmt_can_make_abnormal_goto (last
))
855 /* If this statement has reachable exception handlers, then
856 create abnormal edges to them. */
857 make_eh_edges (last
);
859 /* BUILTIN_RETURN is really a return statement. */
860 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
862 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
865 /* Some calls are known not to return. */
867 fallthru
= !(gimple_call_flags (last
) & ECF_NORETURN
);
871 /* A GIMPLE_ASSIGN may throw internally and thus be considered
873 if (is_ctrl_altering_stmt (last
))
874 make_eh_edges (last
);
879 make_gimple_asm_edges (bb
);
884 fallthru
= make_gimple_omp_edges (bb
, pcur_region
, pomp_index
);
887 case GIMPLE_TRANSACTION
:
890 = gimple_transaction_label (as_a
<gtransaction
*> (last
));
892 make_edge (bb
, label_to_block (abort_label
), EDGE_TM_ABORT
);
898 gcc_assert (!stmt_ends_bb_p (last
));
904 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
909 /* Join all the blocks in the flowgraph. */
915 struct omp_region
*cur_region
= NULL
;
916 auto_vec
<basic_block
> ab_edge_goto
;
917 auto_vec
<basic_block
> ab_edge_call
;
918 int *bb_to_omp_idx
= NULL
;
919 int cur_omp_region_idx
= 0;
921 /* Create an edge from entry to the first block with executable
923 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
924 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
927 /* Traverse the basic block array placing edges. */
928 FOR_EACH_BB_FN (bb
, cfun
)
933 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
935 mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
937 ab_edge_goto
.safe_push (bb
);
939 ab_edge_call
.safe_push (bb
);
941 if (cur_region
&& bb_to_omp_idx
== NULL
)
942 bb_to_omp_idx
= XCNEWVEC (int, n_basic_blocks_for_fn (cfun
));
945 /* Computed gotos are hell to deal with, especially if there are
946 lots of them with a large number of destinations. So we factor
947 them to a common computed goto location before we build the
948 edge list. After we convert back to normal form, we will un-factor
949 the computed gotos since factoring introduces an unwanted jump.
950 For non-local gotos and abnormal edges from calls to calls that return
951 twice or forced labels, factor the abnormal edges too, by having all
952 abnormal edges from the calls go to a common artificial basic block
953 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
954 basic block to all forced labels and calls returning twice.
955 We do this per-OpenMP structured block, because those regions
956 are guaranteed to be single entry single exit by the standard,
957 so it is not allowed to enter or exit such regions abnormally this way,
958 thus all computed gotos, non-local gotos and setjmp/longjmp calls
959 must not transfer control across SESE region boundaries. */
960 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
962 gimple_stmt_iterator gsi
;
963 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
964 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
965 int count
= n_basic_blocks_for_fn (cfun
);
968 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
970 FOR_EACH_BB_FN (bb
, cfun
)
972 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
974 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
980 target
= gimple_label_label (label_stmt
);
982 /* Make an edge to every label block that has been marked as a
983 potential target for a computed goto or a non-local goto. */
984 if (FORCED_LABEL (target
))
985 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
986 &ab_edge_goto
, true);
987 if (DECL_NONLOCAL (target
))
989 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
990 &ab_edge_call
, false);
995 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
996 gsi_next_nondebug (&gsi
);
997 if (!gsi_end_p (gsi
))
999 /* Make an edge to every setjmp-like call. */
1000 gimple call_stmt
= gsi_stmt (gsi
);
1001 if (is_gimple_call (call_stmt
)
1002 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
1003 || gimple_call_builtin_p (call_stmt
,
1004 BUILT_IN_SETJMP_RECEIVER
)))
1005 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1006 &ab_edge_call
, false);
1011 XDELETE (dispatcher_bbs
);
1014 XDELETE (bb_to_omp_idx
);
1016 free_omp_regions ();
1018 /* Fold COND_EXPR_COND of each COND_EXPR. */
1019 fold_cond_expr_cond ();
1022 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1023 needed. Returns true if new bbs were created.
1024 Note: This is transitional code, and should not be used for new code. We
1025 should be able to get rid of this by rewriting all target va-arg
1026 gimplification hooks to use an interface gimple_build_cond_value as described
1027 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1030 gimple_find_sub_bbs (gimple_seq seq
, gimple_stmt_iterator
*gsi
)
1032 gimple stmt
= gsi_stmt (*gsi
);
1033 basic_block bb
= gimple_bb (stmt
);
1034 basic_block lastbb
, afterbb
;
1035 int old_num_bbs
= n_basic_blocks_for_fn (cfun
);
1037 lastbb
= make_blocks_1 (seq
, bb
);
1038 if (old_num_bbs
== n_basic_blocks_for_fn (cfun
))
1040 e
= split_block (bb
, stmt
);
1041 /* Move e->dest to come after the new basic blocks. */
1043 unlink_block (afterbb
);
1044 link_block (afterbb
, lastbb
);
1045 redirect_edge_succ (e
, bb
->next_bb
);
1047 while (bb
!= afterbb
)
1049 struct omp_region
*cur_region
= NULL
;
1050 int cur_omp_region_idx
= 0;
1051 int mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
1052 gcc_assert (!mer
&& !cur_region
);
1053 add_bb_to_loop (bb
, afterbb
->loop_father
);
1059 /* Find the next available discriminator value for LOCUS. The
1060 discriminator distinguishes among several basic blocks that
1061 share a common locus, allowing for more accurate sample-based
1065 next_discriminator_for_locus (location_t locus
)
1067 struct locus_discrim_map item
;
1068 struct locus_discrim_map
**slot
;
1071 item
.discriminator
= 0;
1072 slot
= discriminator_per_locus
->find_slot_with_hash (
1073 &item
, LOCATION_LINE (locus
), INSERT
);
1075 if (*slot
== HTAB_EMPTY_ENTRY
)
1077 *slot
= XNEW (struct locus_discrim_map
);
1079 (*slot
)->locus
= locus
;
1080 (*slot
)->discriminator
= 0;
1082 (*slot
)->discriminator
++;
1083 return (*slot
)->discriminator
;
1086 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1089 same_line_p (location_t locus1
, location_t locus2
)
1091 expanded_location from
, to
;
1093 if (locus1
== locus2
)
1096 from
= expand_location (locus1
);
1097 to
= expand_location (locus2
);
1099 if (from
.line
!= to
.line
)
1101 if (from
.file
== to
.file
)
1103 return (from
.file
!= NULL
1105 && filename_cmp (from
.file
, to
.file
) == 0);
1108 /* Assign discriminators to each basic block. */
1111 assign_discriminators (void)
1115 FOR_EACH_BB_FN (bb
, cfun
)
1119 gimple last
= last_stmt (bb
);
1120 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
1122 if (locus
== UNKNOWN_LOCATION
)
1125 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1127 gimple first
= first_non_label_stmt (e
->dest
);
1128 gimple last
= last_stmt (e
->dest
);
1129 if ((first
&& same_line_p (locus
, gimple_location (first
)))
1130 || (last
&& same_line_p (locus
, gimple_location (last
))))
1132 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
1133 bb
->discriminator
= next_discriminator_for_locus (locus
);
1135 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
1141 /* Create the edges for a GIMPLE_COND starting at block BB. */
1144 make_cond_expr_edges (basic_block bb
)
1146 gcond
*entry
= as_a
<gcond
*> (last_stmt (bb
));
1147 gimple then_stmt
, else_stmt
;
1148 basic_block then_bb
, else_bb
;
1149 tree then_label
, else_label
;
1153 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
1155 /* Entry basic blocks for each component. */
1156 then_label
= gimple_cond_true_label (entry
);
1157 else_label
= gimple_cond_false_label (entry
);
1158 then_bb
= label_to_block (then_label
);
1159 else_bb
= label_to_block (else_label
);
1160 then_stmt
= first_stmt (then_bb
);
1161 else_stmt
= first_stmt (else_bb
);
1163 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1164 e
->goto_locus
= gimple_location (then_stmt
);
1165 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1167 e
->goto_locus
= gimple_location (else_stmt
);
1169 /* We do not need the labels anymore. */
1170 gimple_cond_set_true_label (entry
, NULL_TREE
);
1171 gimple_cond_set_false_label (entry
, NULL_TREE
);
1175 /* Called for each element in the hash table (P) as we delete the
1176 edge to cases hash table.
1178 Clear all the TREE_CHAINs to prevent problems with copying of
1179 SWITCH_EXPRs and structure sharing rules, then free the hash table
1183 edge_to_cases_cleanup (edge
const &, tree
const &value
, void *)
1187 for (t
= value
; t
; t
= next
)
1189 next
= CASE_CHAIN (t
);
1190 CASE_CHAIN (t
) = NULL
;
1196 /* Start recording information mapping edges to case labels. */
1199 start_recording_case_labels (void)
1201 gcc_assert (edge_to_cases
== NULL
);
1202 edge_to_cases
= new hash_map
<edge
, tree
>;
1203 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
1206 /* Return nonzero if we are recording information for case labels. */
1209 recording_case_labels_p (void)
1211 return (edge_to_cases
!= NULL
);
1214 /* Stop recording information mapping edges to case labels and
1215 remove any information we have recorded. */
1217 end_recording_case_labels (void)
1221 edge_to_cases
->traverse
<void *, edge_to_cases_cleanup
> (NULL
);
1222 delete edge_to_cases
;
1223 edge_to_cases
= NULL
;
1224 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1226 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
1229 gimple stmt
= last_stmt (bb
);
1230 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1231 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1234 BITMAP_FREE (touched_switch_bbs
);
1237 /* If we are inside a {start,end}_recording_cases block, then return
1238 a chain of CASE_LABEL_EXPRs from T which reference E.
1240 Otherwise return NULL. */
1243 get_cases_for_edge (edge e
, gswitch
*t
)
1248 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1249 chains available. Return NULL so the caller can detect this case. */
1250 if (!recording_case_labels_p ())
1253 slot
= edge_to_cases
->get (e
);
1257 /* If we did not find E in the hash table, then this must be the first
1258 time we have been queried for information about E & T. Add all the
1259 elements from T to the hash table then perform the query again. */
1261 n
= gimple_switch_num_labels (t
);
1262 for (i
= 0; i
< n
; i
++)
1264 tree elt
= gimple_switch_label (t
, i
);
1265 tree lab
= CASE_LABEL (elt
);
1266 basic_block label_bb
= label_to_block (lab
);
1267 edge this_edge
= find_edge (e
->src
, label_bb
);
1269 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1271 tree
&s
= edge_to_cases
->get_or_insert (this_edge
);
1272 CASE_CHAIN (elt
) = s
;
1276 return *edge_to_cases
->get (e
);
1279 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1282 make_gimple_switch_edges (gswitch
*entry
, basic_block bb
)
1286 n
= gimple_switch_num_labels (entry
);
1288 for (i
= 0; i
< n
; ++i
)
1290 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1291 basic_block label_bb
= label_to_block (lab
);
1292 make_edge (bb
, label_bb
, 0);
1297 /* Return the basic block holding label DEST. */
1300 label_to_block_fn (struct function
*ifun
, tree dest
)
1302 int uid
= LABEL_DECL_UID (dest
);
1304 /* We would die hard when faced by an undefined label. Emit a label to
1305 the very first basic block. This will hopefully make even the dataflow
1306 and undefined variable warnings quite right. */
1307 if (seen_error () && uid
< 0)
1309 gimple_stmt_iterator gsi
=
1310 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1313 stmt
= gimple_build_label (dest
);
1314 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1315 uid
= LABEL_DECL_UID (dest
);
1317 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1319 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1322 /* Create edges for a goto statement at block BB. Returns true
1323 if abnormal edges should be created. */
1326 make_goto_expr_edges (basic_block bb
)
1328 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1329 gimple goto_t
= gsi_stmt (last
);
1331 /* A simple GOTO creates normal edges. */
1332 if (simple_goto_p (goto_t
))
1334 tree dest
= gimple_goto_dest (goto_t
);
1335 basic_block label_bb
= label_to_block (dest
);
1336 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1337 e
->goto_locus
= gimple_location (goto_t
);
1338 gsi_remove (&last
, true);
1342 /* A computed GOTO creates abnormal edges. */
1346 /* Create edges for an asm statement with labels at block BB. */
1349 make_gimple_asm_edges (basic_block bb
)
1351 gasm
*stmt
= as_a
<gasm
*> (last_stmt (bb
));
1352 int i
, n
= gimple_asm_nlabels (stmt
);
1354 for (i
= 0; i
< n
; ++i
)
1356 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1357 basic_block label_bb
= label_to_block (label
);
1358 make_edge (bb
, label_bb
, 0);
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/
1366 /* Cleanup useless labels in basic blocks. This is something we wish
1367 to do early because it allows us to group case labels before creating
1368 the edges for the CFG, and it speeds up block statement iterators in
1369 all passes later on.
1370 We rerun this pass after CFG is created, to get rid of the labels that
1371 are no longer referenced. After then we do not run it any more, since
1372 (almost) no new labels should be created. */
1374 /* A map from basic block index to the leading label of that block. */
1375 static struct label_record
1380 /* True if the label is referenced from somewhere. */
1384 /* Given LABEL return the first label in the same basic block. */
1387 main_block_label (tree label
)
1389 basic_block bb
= label_to_block (label
);
1390 tree main_label
= label_for_bb
[bb
->index
].label
;
1392 /* label_to_block possibly inserted undefined label into the chain. */
1395 label_for_bb
[bb
->index
].label
= label
;
1399 label_for_bb
[bb
->index
].used
= true;
1403 /* Clean up redundant labels within the exception tree. */
1406 cleanup_dead_labels_eh (void)
1413 if (cfun
->eh
== NULL
)
1416 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1417 if (lp
&& lp
->post_landing_pad
)
1419 lab
= main_block_label (lp
->post_landing_pad
);
1420 if (lab
!= lp
->post_landing_pad
)
1422 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1423 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1427 FOR_ALL_EH_REGION (r
)
1431 case ERT_MUST_NOT_THROW
:
1437 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1441 c
->label
= main_block_label (lab
);
1446 case ERT_ALLOWED_EXCEPTIONS
:
1447 lab
= r
->u
.allowed
.label
;
1449 r
->u
.allowed
.label
= main_block_label (lab
);
1455 /* Cleanup redundant labels. This is a three-step process:
1456 1) Find the leading label for each block.
1457 2) Redirect all references to labels to the leading labels.
1458 3) Cleanup all useless labels. */
1461 cleanup_dead_labels (void)
1464 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1466 /* Find a suitable label for each block. We use the first user-defined
1467 label if there is one, or otherwise just the first label we see. */
1468 FOR_EACH_BB_FN (bb
, cfun
)
1470 gimple_stmt_iterator i
;
1472 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1475 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1480 label
= gimple_label_label (label_stmt
);
1482 /* If we have not yet seen a label for the current block,
1483 remember this one and see if there are more labels. */
1484 if (!label_for_bb
[bb
->index
].label
)
1486 label_for_bb
[bb
->index
].label
= label
;
1490 /* If we did see a label for the current block already, but it
1491 is an artificially created label, replace it if the current
1492 label is a user defined label. */
1493 if (!DECL_ARTIFICIAL (label
)
1494 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1496 label_for_bb
[bb
->index
].label
= label
;
1502 /* Now redirect all jumps/branches to the selected label.
1503 First do so for each block ending in a control statement. */
1504 FOR_EACH_BB_FN (bb
, cfun
)
1506 gimple stmt
= last_stmt (bb
);
1507 tree label
, new_label
;
1512 switch (gimple_code (stmt
))
1516 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
1517 label
= gimple_cond_true_label (cond_stmt
);
1520 new_label
= main_block_label (label
);
1521 if (new_label
!= label
)
1522 gimple_cond_set_true_label (cond_stmt
, new_label
);
1525 label
= gimple_cond_false_label (cond_stmt
);
1528 new_label
= main_block_label (label
);
1529 if (new_label
!= label
)
1530 gimple_cond_set_false_label (cond_stmt
, new_label
);
1537 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
1538 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
1540 /* Replace all destination labels. */
1541 for (i
= 0; i
< n
; ++i
)
1543 tree case_label
= gimple_switch_label (switch_stmt
, i
);
1544 label
= CASE_LABEL (case_label
);
1545 new_label
= main_block_label (label
);
1546 if (new_label
!= label
)
1547 CASE_LABEL (case_label
) = new_label
;
1554 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1555 int i
, n
= gimple_asm_nlabels (asm_stmt
);
1557 for (i
= 0; i
< n
; ++i
)
1559 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
1560 tree label
= main_block_label (TREE_VALUE (cons
));
1561 TREE_VALUE (cons
) = label
;
1566 /* We have to handle gotos until they're removed, and we don't
1567 remove them until after we've created the CFG edges. */
1569 if (!computed_goto_p (stmt
))
1571 ggoto
*goto_stmt
= as_a
<ggoto
*> (stmt
);
1572 label
= gimple_goto_dest (goto_stmt
);
1573 new_label
= main_block_label (label
);
1574 if (new_label
!= label
)
1575 gimple_goto_set_dest (goto_stmt
, new_label
);
1579 case GIMPLE_TRANSACTION
:
1581 gtransaction
*trans_stmt
= as_a
<gtransaction
*> (stmt
);
1582 tree label
= gimple_transaction_label (trans_stmt
);
1585 tree new_label
= main_block_label (label
);
1586 if (new_label
!= label
)
1587 gimple_transaction_set_label (trans_stmt
, new_label
);
1597 /* Do the same for the exception region tree labels. */
1598 cleanup_dead_labels_eh ();
1600 /* Finally, purge dead labels. All user-defined labels and labels that
1601 can be the target of non-local gotos and labels which have their
1602 address taken are preserved. */
1603 FOR_EACH_BB_FN (bb
, cfun
)
1605 gimple_stmt_iterator i
;
1606 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1608 if (!label_for_this_bb
)
1611 /* If the main label of the block is unused, we may still remove it. */
1612 if (!label_for_bb
[bb
->index
].used
)
1613 label_for_this_bb
= NULL
;
1615 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1618 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1623 label
= gimple_label_label (label_stmt
);
1625 if (label
== label_for_this_bb
1626 || !DECL_ARTIFICIAL (label
)
1627 || DECL_NONLOCAL (label
)
1628 || FORCED_LABEL (label
))
1631 gsi_remove (&i
, true);
1635 free (label_for_bb
);
1638 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1639 the ones jumping to the same label.
1640 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1643 group_case_labels_stmt (gswitch
*stmt
)
1645 int old_size
= gimple_switch_num_labels (stmt
);
1646 int i
, j
, new_size
= old_size
;
1647 basic_block default_bb
= NULL
;
1649 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1651 /* Look for possible opportunities to merge cases. */
1653 while (i
< old_size
)
1655 tree base_case
, base_high
;
1656 basic_block base_bb
;
1658 base_case
= gimple_switch_label (stmt
, i
);
1660 gcc_assert (base_case
);
1661 base_bb
= label_to_block (CASE_LABEL (base_case
));
1663 /* Discard cases that have the same destination as the
1665 if (base_bb
== default_bb
)
1667 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1673 base_high
= CASE_HIGH (base_case
)
1674 ? CASE_HIGH (base_case
)
1675 : CASE_LOW (base_case
);
1678 /* Try to merge case labels. Break out when we reach the end
1679 of the label vector or when we cannot merge the next case
1680 label with the current one. */
1681 while (i
< old_size
)
1683 tree merge_case
= gimple_switch_label (stmt
, i
);
1684 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1685 wide_int bhp1
= wi::add (base_high
, 1);
1687 /* Merge the cases if they jump to the same place,
1688 and their ranges are consecutive. */
1689 if (merge_bb
== base_bb
1690 && wi::eq_p (CASE_LOW (merge_case
), bhp1
))
1692 base_high
= CASE_HIGH (merge_case
) ?
1693 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1694 CASE_HIGH (base_case
) = base_high
;
1695 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1704 /* Compress the case labels in the label vector, and adjust the
1705 length of the vector. */
1706 for (i
= 0, j
= 0; i
< new_size
; i
++)
1708 while (! gimple_switch_label (stmt
, j
))
1710 gimple_switch_set_label (stmt
, i
,
1711 gimple_switch_label (stmt
, j
++));
1714 gcc_assert (new_size
<= old_size
);
1715 gimple_switch_set_num_labels (stmt
, new_size
);
1718 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1719 and scan the sorted vector of cases. Combine the ones jumping to the
1723 group_case_labels (void)
1727 FOR_EACH_BB_FN (bb
, cfun
)
1729 gimple stmt
= last_stmt (bb
);
1730 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1731 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1735 /* Checks whether we can merge block B into block A. */
1738 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1742 if (!single_succ_p (a
))
1745 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1748 if (single_succ (a
) != b
)
1751 if (!single_pred_p (b
))
1754 if (a
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
1755 || b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1758 /* If A ends by a statement causing exceptions or something similar, we
1759 cannot merge the blocks. */
1760 stmt
= last_stmt (a
);
1761 if (stmt
&& stmt_ends_bb_p (stmt
))
1764 /* Do not allow a block with only a non-local label to be merged. */
1766 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1767 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
1770 /* Examine the labels at the beginning of B. */
1771 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);
1775 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1778 lab
= gimple_label_label (label_stmt
);
1780 /* Do not remove user forced labels or for -O0 any user labels. */
1781 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1785 /* Protect simple loop latches. We only want to avoid merging
1786 the latch with the loop header or with a block in another
1787 loop in this case. */
1789 && b
->loop_father
->latch
== b
1790 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES
)
1791 && (b
->loop_father
->header
== a
1792 || b
->loop_father
!= a
->loop_father
))
1795 /* It must be possible to eliminate all phi nodes in B. If ssa form
1796 is not up-to-date and a name-mapping is registered, we cannot eliminate
1797 any phis. Symbols marked for renaming are never a problem though. */
1798 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);
1801 gphi
*phi
= gsi
.phi ();
1802 /* Technically only new names matter. */
1803 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1807 /* When not optimizing, don't merge if we'd lose goto_locus. */
1809 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1811 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1812 gimple_stmt_iterator prev
, next
;
1813 prev
= gsi_last_nondebug_bb (a
);
1814 next
= gsi_after_labels (b
);
1815 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1816 gsi_next_nondebug (&next
);
1817 if ((gsi_end_p (prev
)
1818 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1819 && (gsi_end_p (next
)
1820 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1827 /* Replaces all uses of NAME by VAL. */
1830 replace_uses_by (tree name
, tree val
)
1832 imm_use_iterator imm_iter
;
1837 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1839 /* Mark the block if we change the last stmt in it. */
1840 if (cfgcleanup_altered_bbs
1841 && stmt_ends_bb_p (stmt
))
1842 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1844 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1846 replace_exp (use
, val
);
1848 if (gimple_code (stmt
) == GIMPLE_PHI
)
1850 e
= gimple_phi_arg_edge (as_a
<gphi
*> (stmt
),
1851 PHI_ARG_INDEX_FROM_USE (use
));
1852 if (e
->flags
& EDGE_ABNORMAL
1853 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
))
1855 /* This can only occur for virtual operands, since
1856 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1857 would prevent replacement. */
1858 gcc_checking_assert (virtual_operand_p (name
));
1859 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1864 if (gimple_code (stmt
) != GIMPLE_PHI
)
1866 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1867 gimple orig_stmt
= stmt
;
1870 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1871 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1872 only change sth from non-invariant to invariant, and only
1873 when propagating constants. */
1874 if (is_gimple_min_invariant (val
))
1875 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1877 tree op
= gimple_op (stmt
, i
);
1878 /* Operands may be empty here. For example, the labels
1879 of a GIMPLE_COND are nulled out following the creation
1880 of the corresponding CFG edges. */
1881 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1882 recompute_tree_invariant_for_addr_expr (op
);
1885 if (fold_stmt (&gsi
))
1886 stmt
= gsi_stmt (gsi
);
1888 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1889 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1895 gcc_checking_assert (has_zero_uses (name
));
1897 /* Also update the trees stored in loop structures. */
1902 FOR_EACH_LOOP (loop
, 0)
1904 substitute_in_loop_info (loop
, name
, val
);
1909 /* Merge block B into block A. */
1912 gimple_merge_blocks (basic_block a
, basic_block b
)
1914 gimple_stmt_iterator last
, gsi
;
1918 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1920 /* Remove all single-valued PHI nodes from block B of the form
1921 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1922 gsi
= gsi_last_bb (a
);
1923 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1925 gimple phi
= gsi_stmt (psi
);
1926 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1928 bool may_replace_uses
= (virtual_operand_p (def
)
1929 || may_propagate_copy (def
, use
));
1931 /* In case we maintain loop closed ssa form, do not propagate arguments
1932 of loop exit phi nodes. */
1934 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1935 && !virtual_operand_p (def
)
1936 && TREE_CODE (use
) == SSA_NAME
1937 && a
->loop_father
!= b
->loop_father
)
1938 may_replace_uses
= false;
1940 if (!may_replace_uses
)
1942 gcc_assert (!virtual_operand_p (def
));
1944 /* Note that just emitting the copies is fine -- there is no problem
1945 with ordering of phi nodes. This is because A is the single
1946 predecessor of B, therefore results of the phi nodes cannot
1947 appear as arguments of the phi nodes. */
1948 copy
= gimple_build_assign (def
, use
);
1949 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1950 remove_phi_node (&psi
, false);
1954 /* If we deal with a PHI for virtual operands, we can simply
1955 propagate these without fussing with folding or updating
1957 if (virtual_operand_p (def
))
1959 imm_use_iterator iter
;
1960 use_operand_p use_p
;
1963 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1964 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1965 SET_USE (use_p
, use
);
1967 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1968 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1971 replace_uses_by (def
, use
);
1973 remove_phi_node (&psi
, true);
1977 /* Ensure that B follows A. */
1978 move_block_after (b
, a
);
1980 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1981 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1983 /* Remove labels from B and set gimple_bb to A for other statements. */
1984 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
1986 gimple stmt
= gsi_stmt (gsi
);
1987 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1989 tree label
= gimple_label_label (label_stmt
);
1992 gsi_remove (&gsi
, false);
1994 /* Now that we can thread computed gotos, we might have
1995 a situation where we have a forced label in block B
1996 However, the label at the start of block B might still be
1997 used in other ways (think about the runtime checking for
1998 Fortran assigned gotos). So we can not just delete the
1999 label. Instead we move the label to the start of block A. */
2000 if (FORCED_LABEL (label
))
2002 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
2003 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
2005 /* Other user labels keep around in a form of a debug stmt. */
2006 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
2008 gimple dbg
= gimple_build_debug_bind (label
,
2011 gimple_debug_bind_reset_value (dbg
);
2012 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
2015 lp_nr
= EH_LANDING_PAD_NR (label
);
2018 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
2019 lp
->post_landing_pad
= NULL
;
2024 gimple_set_bb (stmt
, a
);
2029 /* When merging two BBs, if their counts are different, the larger count
2030 is selected as the new bb count. This is to handle inconsistent
2032 if (a
->loop_father
== b
->loop_father
)
2034 a
->count
= MAX (a
->count
, b
->count
);
2035 a
->frequency
= MAX (a
->frequency
, b
->frequency
);
2038 /* Merge the sequences. */
2039 last
= gsi_last_bb (a
);
2040 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
2041 set_bb_seq (b
, NULL
);
2043 if (cfgcleanup_altered_bbs
)
2044 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
2048 /* Return the one of two successors of BB that is not reachable by a
2049 complex edge, if there is one. Else, return BB. We use
2050 this in optimizations that use post-dominators for their heuristics,
2051 to catch the cases in C++ where function calls are involved. */
2054 single_noncomplex_succ (basic_block bb
)
2057 if (EDGE_COUNT (bb
->succs
) != 2)
2060 e0
= EDGE_SUCC (bb
, 0);
2061 e1
= EDGE_SUCC (bb
, 1);
2062 if (e0
->flags
& EDGE_COMPLEX
)
2064 if (e1
->flags
& EDGE_COMPLEX
)
2070 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2073 notice_special_calls (gcall
*call
)
2075 int flags
= gimple_call_flags (call
);
2077 if (flags
& ECF_MAY_BE_ALLOCA
)
2078 cfun
->calls_alloca
= true;
2079 if (flags
& ECF_RETURNS_TWICE
)
2080 cfun
->calls_setjmp
= true;
2084 /* Clear flags set by notice_special_calls. Used by dead code removal
2085 to update the flags. */
2088 clear_special_calls (void)
2090 cfun
->calls_alloca
= false;
2091 cfun
->calls_setjmp
= false;
2094 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2097 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
2099 /* Since this block is no longer reachable, we can just delete all
2100 of its PHI nodes. */
2101 remove_phi_nodes (bb
);
2103 /* Remove edges to BB's successors. */
2104 while (EDGE_COUNT (bb
->succs
) > 0)
2105 remove_edge (EDGE_SUCC (bb
, 0));
2109 /* Remove statements of basic block BB. */
2112 remove_bb (basic_block bb
)
2114 gimple_stmt_iterator i
;
2118 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
2119 if (dump_flags
& TDF_DETAILS
)
2121 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
2122 fprintf (dump_file
, "\n");
2128 struct loop
*loop
= bb
->loop_father
;
2130 /* If a loop gets removed, clean up the information associated
2132 if (loop
->latch
== bb
2133 || loop
->header
== bb
)
2134 free_numbers_of_iterations_estimates_loop (loop
);
2137 /* Remove all the instructions in the block. */
2138 if (bb_seq (bb
) != NULL
)
2140 /* Walk backwards so as to get a chance to substitute all
2141 released DEFs into debug stmts. See
2142 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2144 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2146 gimple stmt
= gsi_stmt (i
);
2147 glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
);
2149 && (FORCED_LABEL (gimple_label_label (label_stmt
))
2150 || DECL_NONLOCAL (gimple_label_label (label_stmt
))))
2153 gimple_stmt_iterator new_gsi
;
2155 /* A non-reachable non-local label may still be referenced.
2156 But it no longer needs to carry the extra semantics of
2158 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
2160 DECL_NONLOCAL (gimple_label_label (label_stmt
)) = 0;
2161 FORCED_LABEL (gimple_label_label (label_stmt
)) = 1;
2164 new_bb
= bb
->prev_bb
;
2165 new_gsi
= gsi_start_bb (new_bb
);
2166 gsi_remove (&i
, false);
2167 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2171 /* Release SSA definitions if we are in SSA. Note that we
2172 may be called when not in SSA. For example,
2173 final_cleanup calls this function via
2174 cleanup_tree_cfg. */
2175 if (gimple_in_ssa_p (cfun
))
2176 release_defs (stmt
);
2178 gsi_remove (&i
, true);
2182 i
= gsi_last_bb (bb
);
2188 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2189 bb
->il
.gimple
.seq
= NULL
;
2190 bb
->il
.gimple
.phi_nodes
= NULL
;
2194 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2195 predicate VAL, return the edge that will be taken out of the block.
2196 If VAL does not match a unique edge, NULL is returned. */
2199 find_taken_edge (basic_block bb
, tree val
)
2203 stmt
= last_stmt (bb
);
2206 gcc_assert (is_ctrl_stmt (stmt
));
2211 if (!is_gimple_min_invariant (val
))
2214 if (gimple_code (stmt
) == GIMPLE_COND
)
2215 return find_taken_edge_cond_expr (bb
, val
);
2217 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2218 return find_taken_edge_switch_expr (as_a
<gswitch
*> (stmt
), bb
, val
);
2220 if (computed_goto_p (stmt
))
2222 /* Only optimize if the argument is a label, if the argument is
2223 not a label then we can not construct a proper CFG.
2225 It may be the case that we only need to allow the LABEL_REF to
2226 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2227 appear inside a LABEL_EXPR just to be safe. */
2228 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2229 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2230 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2237 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2238 statement, determine which of the outgoing edges will be taken out of the
2239 block. Return NULL if either edge may be taken. */
2242 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2247 dest
= label_to_block (val
);
2250 e
= find_edge (bb
, dest
);
2251 gcc_assert (e
!= NULL
);
2257 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2258 statement, determine which of the two edges will be taken out of the
2259 block. Return NULL if either edge may be taken. */
2262 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2264 edge true_edge
, false_edge
;
2266 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2268 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2269 return (integer_zerop (val
) ? false_edge
: true_edge
);
2272 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2273 statement, determine which edge will be taken out of the block. Return
2274 NULL if any edge may be taken. */
2277 find_taken_edge_switch_expr (gswitch
*switch_stmt
, basic_block bb
,
2280 basic_block dest_bb
;
2284 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2285 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2287 e
= find_edge (bb
, dest_bb
);
2293 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2294 We can make optimal use here of the fact that the case labels are
2295 sorted: We can do a binary search for a case matching VAL. */
2298 find_case_label_for_value (gswitch
*switch_stmt
, tree val
)
2300 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2301 tree default_case
= gimple_switch_default_label (switch_stmt
);
2303 for (low
= 0, high
= n
; high
- low
> 1; )
2305 size_t i
= (high
+ low
) / 2;
2306 tree t
= gimple_switch_label (switch_stmt
, i
);
2309 /* Cache the result of comparing CASE_LOW and val. */
2310 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2317 if (CASE_HIGH (t
) == NULL
)
2319 /* A singe-valued case label. */
2325 /* A case range. We can only handle integer ranges. */
2326 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2331 return default_case
;
2335 /* Dump a basic block on stderr. */
2338 gimple_debug_bb (basic_block bb
)
2340 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2344 /* Dump basic block with index N on stderr. */
2347 gimple_debug_bb_n (int n
)
2349 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2350 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2354 /* Dump the CFG on stderr.
2356 FLAGS are the same used by the tree dumping functions
2357 (see TDF_* in dumpfile.h). */
2360 gimple_debug_cfg (int flags
)
2362 gimple_dump_cfg (stderr
, flags
);
2366 /* Dump the program showing basic block boundaries on the given FILE.
2368 FLAGS are the same used by the tree dumping functions (see TDF_* in
2372 gimple_dump_cfg (FILE *file
, int flags
)
2374 if (flags
& TDF_DETAILS
)
2376 dump_function_header (file
, current_function_decl
, flags
);
2377 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2378 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2379 last_basic_block_for_fn (cfun
));
2381 brief_dump_cfg (file
, flags
| TDF_COMMENT
);
2382 fprintf (file
, "\n");
2385 if (flags
& TDF_STATS
)
2386 dump_cfg_stats (file
);
2388 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2392 /* Dump CFG statistics on FILE. */
2395 dump_cfg_stats (FILE *file
)
2397 static long max_num_merged_labels
= 0;
2398 unsigned long size
, total
= 0;
2401 const char * const fmt_str
= "%-30s%-13s%12s\n";
2402 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2403 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2404 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2405 const char *funcname
= current_function_name ();
2407 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2409 fprintf (file
, "---------------------------------------------------------\n");
2410 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2411 fprintf (file
, fmt_str
, "", " instances ", "used ");
2412 fprintf (file
, "---------------------------------------------------------\n");
2414 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2416 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2417 SCALE (size
), LABEL (size
));
2420 FOR_EACH_BB_FN (bb
, cfun
)
2421 num_edges
+= EDGE_COUNT (bb
->succs
);
2422 size
= num_edges
* sizeof (struct edge_def
);
2424 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2426 fprintf (file
, "---------------------------------------------------------\n");
2427 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2429 fprintf (file
, "---------------------------------------------------------\n");
2430 fprintf (file
, "\n");
2432 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2433 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2435 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2436 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2438 fprintf (file
, "\n");
2442 /* Dump CFG statistics on stderr. Keep extern so that it's always
2443 linked in the final executable. */
2446 debug_cfg_stats (void)
2448 dump_cfg_stats (stderr
);
2451 /*---------------------------------------------------------------------------
2452 Miscellaneous helpers
2453 ---------------------------------------------------------------------------*/
2455 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2456 flow. Transfers of control flow associated with EH are excluded. */
2459 call_can_make_abnormal_goto (gimple t
)
2461 /* If the function has no non-local labels, then a call cannot make an
2462 abnormal transfer of control. */
2463 if (!cfun
->has_nonlocal_label
2464 && !cfun
->calls_setjmp
)
2467 /* Likewise if the call has no side effects. */
2468 if (!gimple_has_side_effects (t
))
2471 /* Likewise if the called function is leaf. */
2472 if (gimple_call_flags (t
) & ECF_LEAF
)
2479 /* Return true if T can make an abnormal transfer of control flow.
2480 Transfers of control flow associated with EH are excluded. */
2483 stmt_can_make_abnormal_goto (gimple t
)
2485 if (computed_goto_p (t
))
2487 if (is_gimple_call (t
))
2488 return call_can_make_abnormal_goto (t
);
2493 /* Return true if T represents a stmt that always transfers control. */
2496 is_ctrl_stmt (gimple t
)
2498 switch (gimple_code (t
))
2512 /* Return true if T is a statement that may alter the flow of control
2513 (e.g., a call to a non-returning function). */
2516 is_ctrl_altering_stmt (gimple t
)
2520 switch (gimple_code (t
))
2523 /* Per stmt call flag indicates whether the call could alter
2525 if (gimple_call_ctrl_altering_p (t
))
2529 case GIMPLE_EH_DISPATCH
:
2530 /* EH_DISPATCH branches to the individual catch handlers at
2531 this level of a try or allowed-exceptions region. It can
2532 fallthru to the next statement as well. */
2536 if (gimple_asm_nlabels (as_a
<gasm
*> (t
)) > 0)
2541 /* OpenMP directives alter control flow. */
2544 case GIMPLE_TRANSACTION
:
2545 /* A transaction start alters control flow. */
2552 /* If a statement can throw, it alters control flow. */
2553 return stmt_can_throw_internal (t
);
2557 /* Return true if T is a simple local goto. */
2560 simple_goto_p (gimple t
)
2562 return (gimple_code (t
) == GIMPLE_GOTO
2563 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2567 /* Return true if STMT should start a new basic block. PREV_STMT is
2568 the statement preceding STMT. It is used when STMT is a label or a
2569 case label. Labels should only start a new basic block if their
2570 previous statement wasn't a label. Otherwise, sequence of labels
2571 would generate unnecessary basic blocks that only contain a single
2575 stmt_starts_bb_p (gimple stmt
, gimple prev_stmt
)
2580 /* Labels start a new basic block only if the preceding statement
2581 wasn't a label of the same type. This prevents the creation of
2582 consecutive blocks that have nothing but a single label. */
2583 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2585 /* Nonlocal and computed GOTO targets always start a new block. */
2586 if (DECL_NONLOCAL (gimple_label_label (label_stmt
))
2587 || FORCED_LABEL (gimple_label_label (label_stmt
)))
2590 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2592 if (DECL_NONLOCAL (gimple_label_label (
2593 as_a
<glabel
*> (prev_stmt
))))
2596 cfg_stats
.num_merged_labels
++;
2602 else if (gimple_code (stmt
) == GIMPLE_CALL
2603 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2604 /* setjmp acts similar to a nonlocal GOTO target and thus should
2605 start a new block. */
2612 /* Return true if T should end a basic block. */
2615 stmt_ends_bb_p (gimple t
)
2617 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2620 /* Remove block annotations and other data structures. */
2623 delete_tree_cfg_annotations (void)
2625 vec_free (label_to_block_map_for_fn (cfun
));
2629 /* Return the first statement in basic block BB. */
2632 first_stmt (basic_block bb
)
2634 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2637 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2645 /* Return the first non-label statement in basic block BB. */
2648 first_non_label_stmt (basic_block bb
)
2650 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2651 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2653 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2656 /* Return the last statement in basic block BB. */
2659 last_stmt (basic_block bb
)
2661 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2664 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2672 /* Return the last statement of an otherwise empty block. Return NULL
2673 if the block is totally empty, or if it contains more than one
2677 last_and_only_stmt (basic_block bb
)
2679 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2685 last
= gsi_stmt (i
);
2686 gsi_prev_nondebug (&i
);
2690 /* Empty statements should no longer appear in the instruction stream.
2691 Everything that might have appeared before should be deleted by
2692 remove_useless_stmts, and the optimizers should just gsi_remove
2693 instead of smashing with build_empty_stmt.
2695 Thus the only thing that should appear here in a block containing
2696 one executable statement is a label. */
2697 prev
= gsi_stmt (i
);
2698 if (gimple_code (prev
) == GIMPLE_LABEL
)
2704 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2707 reinstall_phi_args (edge new_edge
, edge old_edge
)
2713 vec
<edge_var_map
> *v
= redirect_edge_var_map_vector (old_edge
);
2717 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2718 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2719 i
++, gsi_next (&phis
))
2721 gphi
*phi
= phis
.phi ();
2722 tree result
= redirect_edge_var_map_result (vm
);
2723 tree arg
= redirect_edge_var_map_def (vm
);
2725 gcc_assert (result
== gimple_phi_result (phi
));
2727 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2730 redirect_edge_var_map_clear (old_edge
);
2733 /* Returns the basic block after which the new basic block created
2734 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2735 near its "logical" location. This is of most help to humans looking
2736 at debugging dumps. */
2739 split_edge_bb_loc (edge edge_in
)
2741 basic_block dest
= edge_in
->dest
;
2742 basic_block dest_prev
= dest
->prev_bb
;
2746 edge e
= find_edge (dest_prev
, dest
);
2747 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2748 return edge_in
->src
;
2753 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2754 Abort on abnormal edges. */
2757 gimple_split_edge (edge edge_in
)
2759 basic_block new_bb
, after_bb
, dest
;
2762 /* Abnormal edges cannot be split. */
2763 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2765 dest
= edge_in
->dest
;
2767 after_bb
= split_edge_bb_loc (edge_in
);
2769 new_bb
= create_empty_bb (after_bb
);
2770 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2771 new_bb
->count
= edge_in
->count
;
2772 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2773 new_edge
->probability
= REG_BR_PROB_BASE
;
2774 new_edge
->count
= edge_in
->count
;
2776 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2777 gcc_assert (e
== edge_in
);
2778 reinstall_phi_args (new_edge
, e
);
2784 /* Verify properties of the address expression T with base object BASE. */
2787 verify_address (tree t
, tree base
)
2790 bool old_side_effects
;
2792 bool new_side_effects
;
2794 old_constant
= TREE_CONSTANT (t
);
2795 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2797 recompute_tree_invariant_for_addr_expr (t
);
2798 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2799 new_constant
= TREE_CONSTANT (t
);
2801 if (old_constant
!= new_constant
)
2803 error ("constant not recomputed when ADDR_EXPR changed");
2806 if (old_side_effects
!= new_side_effects
)
2808 error ("side effects not recomputed when ADDR_EXPR changed");
2812 if (!(TREE_CODE (base
) == VAR_DECL
2813 || TREE_CODE (base
) == PARM_DECL
2814 || TREE_CODE (base
) == RESULT_DECL
))
2817 if (DECL_GIMPLE_REG_P (base
))
2819 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2826 /* Callback for walk_tree, check that all elements with address taken are
2827 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2828 inside a PHI node. */
2831 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2838 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2839 #define CHECK_OP(N, MSG) \
2840 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2841 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2843 switch (TREE_CODE (t
))
2846 if (SSA_NAME_IN_FREE_LIST (t
))
2848 error ("SSA name in freelist but still referenced");
2854 error ("INDIRECT_REF in gimple IL");
2858 x
= TREE_OPERAND (t
, 0);
2859 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2860 || !is_gimple_mem_ref_addr (x
))
2862 error ("invalid first operand of MEM_REF");
2865 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2866 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2868 error ("invalid offset operand of MEM_REF");
2869 return TREE_OPERAND (t
, 1);
2871 if (TREE_CODE (x
) == ADDR_EXPR
2872 && (x
= verify_address (x
, TREE_OPERAND (x
, 0))))
2878 x
= fold (ASSERT_EXPR_COND (t
));
2879 if (x
== boolean_false_node
)
2881 error ("ASSERT_EXPR with an always-false condition");
2887 error ("MODIFY_EXPR not expected while having tuples");
2894 gcc_assert (is_gimple_address (t
));
2896 /* Skip any references (they will be checked when we recurse down the
2897 tree) and ensure that any variable used as a prefix is marked
2899 for (x
= TREE_OPERAND (t
, 0);
2900 handled_component_p (x
);
2901 x
= TREE_OPERAND (x
, 0))
2904 if ((tem
= verify_address (t
, x
)))
2907 if (!(TREE_CODE (x
) == VAR_DECL
2908 || TREE_CODE (x
) == PARM_DECL
2909 || TREE_CODE (x
) == RESULT_DECL
))
2912 if (!TREE_ADDRESSABLE (x
))
2914 error ("address taken, but ADDRESSABLE bit not set");
2922 x
= COND_EXPR_COND (t
);
2923 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2925 error ("non-integral used in condition");
2928 if (!is_gimple_condexpr (x
))
2930 error ("invalid conditional operand");
2935 case NON_LVALUE_EXPR
:
2936 case TRUTH_NOT_EXPR
:
2940 case FIX_TRUNC_EXPR
:
2945 CHECK_OP (0, "invalid operand to unary operator");
2951 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2953 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2957 if (TREE_CODE (t
) == BIT_FIELD_REF
)
2959 tree t0
= TREE_OPERAND (t
, 0);
2960 tree t1
= TREE_OPERAND (t
, 1);
2961 tree t2
= TREE_OPERAND (t
, 2);
2962 if (!tree_fits_uhwi_p (t1
)
2963 || !tree_fits_uhwi_p (t2
))
2965 error ("invalid position or size operand to BIT_FIELD_REF");
2968 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
2969 && (TYPE_PRECISION (TREE_TYPE (t
))
2970 != tree_to_uhwi (t1
)))
2972 error ("integral result type precision does not match "
2973 "field size of BIT_FIELD_REF");
2976 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
2977 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
2978 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t
)))
2979 != tree_to_uhwi (t1
)))
2981 error ("mode precision of non-integral result does not "
2982 "match field size of BIT_FIELD_REF");
2985 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
2986 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
2987 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
2989 error ("position plus size exceeds size of referenced object in "
2994 t
= TREE_OPERAND (t
, 0);
2999 case ARRAY_RANGE_REF
:
3000 case VIEW_CONVERT_EXPR
:
3001 /* We have a nest of references. Verify that each of the operands
3002 that determine where to reference is either a constant or a variable,
3003 verify that the base is valid, and then show we've already checked
3005 while (handled_component_p (t
))
3007 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
3008 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3009 else if (TREE_CODE (t
) == ARRAY_REF
3010 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
3012 CHECK_OP (1, "invalid array index");
3013 if (TREE_OPERAND (t
, 2))
3014 CHECK_OP (2, "invalid array lower bound");
3015 if (TREE_OPERAND (t
, 3))
3016 CHECK_OP (3, "invalid array stride");
3018 else if (TREE_CODE (t
) == BIT_FIELD_REF
3019 || TREE_CODE (t
) == REALPART_EXPR
3020 || TREE_CODE (t
) == IMAGPART_EXPR
)
3022 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3027 t
= TREE_OPERAND (t
, 0);
3030 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
3032 error ("invalid reference prefix");
3039 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3040 POINTER_PLUS_EXPR. */
3041 if (POINTER_TYPE_P (TREE_TYPE (t
)))
3043 error ("invalid operand to plus/minus, type is a pointer");
3046 CHECK_OP (0, "invalid operand to binary operator");
3047 CHECK_OP (1, "invalid operand to binary operator");
3050 case POINTER_PLUS_EXPR
:
3051 /* Check to make sure the first operand is a pointer or reference type. */
3052 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
3054 error ("invalid operand to pointer plus, first operand is not a pointer");
3057 /* Check to make sure the second operand is a ptrofftype. */
3058 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
3060 error ("invalid operand to pointer plus, second operand is not an "
3061 "integer type of appropriate width");
3071 case UNORDERED_EXPR
:
3080 case TRUNC_DIV_EXPR
:
3082 case FLOOR_DIV_EXPR
:
3083 case ROUND_DIV_EXPR
:
3084 case TRUNC_MOD_EXPR
:
3086 case FLOOR_MOD_EXPR
:
3087 case ROUND_MOD_EXPR
:
3089 case EXACT_DIV_EXPR
:
3099 CHECK_OP (0, "invalid operand to binary operator");
3100 CHECK_OP (1, "invalid operand to binary operator");
3104 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
3108 case CASE_LABEL_EXPR
:
3111 error ("invalid CASE_CHAIN");
3125 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3126 Returns true if there is an error, otherwise false. */
3129 verify_types_in_gimple_min_lval (tree expr
)
3133 if (is_gimple_id (expr
))
3136 if (TREE_CODE (expr
) != TARGET_MEM_REF
3137 && TREE_CODE (expr
) != MEM_REF
)
3139 error ("invalid expression for min lvalue");
3143 /* TARGET_MEM_REFs are strange beasts. */
3144 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3147 op
= TREE_OPERAND (expr
, 0);
3148 if (!is_gimple_val (op
))
3150 error ("invalid operand in indirect reference");
3151 debug_generic_stmt (op
);
3154 /* Memory references now generally can involve a value conversion. */
3159 /* Verify if EXPR is a valid GIMPLE reference expression. If
3160 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3161 if there is an error, otherwise false. */
3164 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
3166 while (handled_component_p (expr
))
3168 tree op
= TREE_OPERAND (expr
, 0);
3170 if (TREE_CODE (expr
) == ARRAY_REF
3171 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3173 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3174 || (TREE_OPERAND (expr
, 2)
3175 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3176 || (TREE_OPERAND (expr
, 3)
3177 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3179 error ("invalid operands to array reference");
3180 debug_generic_stmt (expr
);
3185 /* Verify if the reference array element types are compatible. */
3186 if (TREE_CODE (expr
) == ARRAY_REF
3187 && !useless_type_conversion_p (TREE_TYPE (expr
),
3188 TREE_TYPE (TREE_TYPE (op
))))
3190 error ("type mismatch in array reference");
3191 debug_generic_stmt (TREE_TYPE (expr
));
3192 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3195 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3196 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3197 TREE_TYPE (TREE_TYPE (op
))))
3199 error ("type mismatch in array range reference");
3200 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3201 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3205 if ((TREE_CODE (expr
) == REALPART_EXPR
3206 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3207 && !useless_type_conversion_p (TREE_TYPE (expr
),
3208 TREE_TYPE (TREE_TYPE (op
))))
3210 error ("type mismatch in real/imagpart reference");
3211 debug_generic_stmt (TREE_TYPE (expr
));
3212 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3216 if (TREE_CODE (expr
) == COMPONENT_REF
3217 && !useless_type_conversion_p (TREE_TYPE (expr
),
3218 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3220 error ("type mismatch in component reference");
3221 debug_generic_stmt (TREE_TYPE (expr
));
3222 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3226 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3228 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3229 that their operand is not an SSA name or an invariant when
3230 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3231 bug). Otherwise there is nothing to verify, gross mismatches at
3232 most invoke undefined behavior. */
3234 && (TREE_CODE (op
) == SSA_NAME
3235 || is_gimple_min_invariant (op
)))
3237 error ("conversion of an SSA_NAME on the left hand side");
3238 debug_generic_stmt (expr
);
3241 else if (TREE_CODE (op
) == SSA_NAME
3242 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3244 error ("conversion of register to a different size");
3245 debug_generic_stmt (expr
);
3248 else if (!handled_component_p (op
))
3255 if (TREE_CODE (expr
) == MEM_REF
)
3257 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3259 error ("invalid address operand in MEM_REF");
3260 debug_generic_stmt (expr
);
3263 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3264 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3266 error ("invalid offset operand in MEM_REF");
3267 debug_generic_stmt (expr
);
3271 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3273 if (!TMR_BASE (expr
)
3274 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3276 error ("invalid address operand in TARGET_MEM_REF");
3279 if (!TMR_OFFSET (expr
)
3280 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3281 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3283 error ("invalid offset operand in TARGET_MEM_REF");
3284 debug_generic_stmt (expr
);
3289 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3290 && verify_types_in_gimple_min_lval (expr
));
3293 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3294 list of pointer-to types that is trivially convertible to DEST. */
3297 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3301 if (!TYPE_POINTER_TO (src_obj
))
3304 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3305 if (useless_type_conversion_p (dest
, src
))
3311 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3312 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3315 valid_fixed_convert_types_p (tree type1
, tree type2
)
3317 return (FIXED_POINT_TYPE_P (type1
)
3318 && (INTEGRAL_TYPE_P (type2
)
3319 || SCALAR_FLOAT_TYPE_P (type2
)
3320 || FIXED_POINT_TYPE_P (type2
)));
3323 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3324 is a problem, otherwise false. */
3327 verify_gimple_call (gcall
*stmt
)
3329 tree fn
= gimple_call_fn (stmt
);
3330 tree fntype
, fndecl
;
3333 if (gimple_call_internal_p (stmt
))
3337 error ("gimple call has two targets");
3338 debug_generic_stmt (fn
);
3346 error ("gimple call has no target");
3351 if (fn
&& !is_gimple_call_addr (fn
))
3353 error ("invalid function in gimple call");
3354 debug_generic_stmt (fn
);
3359 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3360 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3361 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3363 error ("non-function in gimple call");
3367 fndecl
= gimple_call_fndecl (stmt
);
3369 && TREE_CODE (fndecl
) == FUNCTION_DECL
3370 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3371 && !DECL_PURE_P (fndecl
)
3372 && !TREE_READONLY (fndecl
))
3374 error ("invalid pure const state for function");
3378 tree lhs
= gimple_call_lhs (stmt
);
3380 && (!is_gimple_lvalue (lhs
)
3381 || verify_types_in_gimple_reference (lhs
, true)))
3383 error ("invalid LHS in gimple call");
3388 && gimple_call_ctrl_altering_p (stmt
)
3389 && gimple_call_noreturn_p (stmt
)
3390 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs
))) == INTEGER_CST
)
3392 error ("LHS in noreturn call");
3396 fntype
= gimple_call_fntype (stmt
);
3399 && !useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (fntype
))
3400 /* ??? At least C++ misses conversions at assignments from
3401 void * call results.
3402 ??? Java is completely off. Especially with functions
3403 returning java.lang.Object.
3404 For now simply allow arbitrary pointer type conversions. */
3405 && !(POINTER_TYPE_P (TREE_TYPE (lhs
))
3406 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3408 error ("invalid conversion in gimple call");
3409 debug_generic_stmt (TREE_TYPE (lhs
));
3410 debug_generic_stmt (TREE_TYPE (fntype
));
3414 if (gimple_call_chain (stmt
)
3415 && !is_gimple_val (gimple_call_chain (stmt
)))
3417 error ("invalid static chain in gimple call");
3418 debug_generic_stmt (gimple_call_chain (stmt
));
3422 /* If there is a static chain argument, the call should either be
3423 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3424 if (gimple_call_chain (stmt
)
3426 && !DECL_STATIC_CHAIN (fndecl
))
3428 error ("static chain with function that doesn%'t use one");
3432 /* ??? The C frontend passes unpromoted arguments in case it
3433 didn't see a function declaration before the call. So for now
3434 leave the call arguments mostly unverified. Once we gimplify
3435 unit-at-a-time we have a chance to fix this. */
3437 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3439 tree arg
= gimple_call_arg (stmt
, i
);
3440 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3441 && !is_gimple_val (arg
))
3442 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3443 && !is_gimple_lvalue (arg
)))
3445 error ("invalid argument to gimple call");
3446 debug_generic_expr (arg
);
3454 /* Verifies the gimple comparison with the result type TYPE and
3455 the operands OP0 and OP1. */
3458 verify_gimple_comparison (tree type
, tree op0
, tree op1
)
3460 tree op0_type
= TREE_TYPE (op0
);
3461 tree op1_type
= TREE_TYPE (op1
);
3463 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3465 error ("invalid operands in gimple comparison");
3469 /* For comparisons we do not have the operations type as the
3470 effective type the comparison is carried out in. Instead
3471 we require that either the first operand is trivially
3472 convertible into the second, or the other way around.
3473 Because we special-case pointers to void we allow
3474 comparisons of pointers with the same mode as well. */
3475 if (!useless_type_conversion_p (op0_type
, op1_type
)
3476 && !useless_type_conversion_p (op1_type
, op0_type
)
3477 && (!POINTER_TYPE_P (op0_type
)
3478 || !POINTER_TYPE_P (op1_type
)
3479 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3481 error ("mismatching comparison operand types");
3482 debug_generic_expr (op0_type
);
3483 debug_generic_expr (op1_type
);
3487 /* The resulting type of a comparison may be an effective boolean type. */
3488 if (INTEGRAL_TYPE_P (type
)
3489 && (TREE_CODE (type
) == BOOLEAN_TYPE
3490 || TYPE_PRECISION (type
) == 1))
3492 if (TREE_CODE (op0_type
) == VECTOR_TYPE
3493 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3495 error ("vector comparison returning a boolean");
3496 debug_generic_expr (op0_type
);
3497 debug_generic_expr (op1_type
);
3501 /* Or an integer vector type with the same size and element count
3502 as the comparison operand types. */
3503 else if (TREE_CODE (type
) == VECTOR_TYPE
3504 && TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
)
3506 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3507 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3509 error ("non-vector operands in vector comparison");
3510 debug_generic_expr (op0_type
);
3511 debug_generic_expr (op1_type
);
3515 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
)
3516 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type
)))
3517 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type
))))
3518 /* The result of a vector comparison is of signed
3520 || TYPE_UNSIGNED (TREE_TYPE (type
)))
3522 error ("invalid vector comparison resulting type");
3523 debug_generic_expr (type
);
3529 error ("bogus comparison result type");
3530 debug_generic_expr (type
);
3537 /* Verify a gimple assignment statement STMT with an unary rhs.
3538 Returns true if anything is wrong. */
3541 verify_gimple_assign_unary (gassign
*stmt
)
3543 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3544 tree lhs
= gimple_assign_lhs (stmt
);
3545 tree lhs_type
= TREE_TYPE (lhs
);
3546 tree rhs1
= gimple_assign_rhs1 (stmt
);
3547 tree rhs1_type
= TREE_TYPE (rhs1
);
3549 if (!is_gimple_reg (lhs
))
3551 error ("non-register as LHS of unary operation");
3555 if (!is_gimple_val (rhs1
))
3557 error ("invalid operand in unary operation");
3561 /* First handle conversions. */
3566 /* Allow conversions from pointer type to integral type only if
3567 there is no sign or zero extension involved.
3568 For targets were the precision of ptrofftype doesn't match that
3569 of pointers we need to allow arbitrary conversions to ptrofftype. */
3570 if ((POINTER_TYPE_P (lhs_type
)
3571 && INTEGRAL_TYPE_P (rhs1_type
))
3572 || (POINTER_TYPE_P (rhs1_type
)
3573 && INTEGRAL_TYPE_P (lhs_type
)
3574 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3575 || ptrofftype_p (sizetype
))))
3578 /* Allow conversion from integral to offset type and vice versa. */
3579 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3580 && INTEGRAL_TYPE_P (rhs1_type
))
3581 || (INTEGRAL_TYPE_P (lhs_type
)
3582 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3585 /* Otherwise assert we are converting between types of the
3587 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3589 error ("invalid types in nop conversion");
3590 debug_generic_expr (lhs_type
);
3591 debug_generic_expr (rhs1_type
);
3598 case ADDR_SPACE_CONVERT_EXPR
:
3600 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3601 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3602 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3604 error ("invalid types in address space conversion");
3605 debug_generic_expr (lhs_type
);
3606 debug_generic_expr (rhs1_type
);
3613 case FIXED_CONVERT_EXPR
:
3615 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3616 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3618 error ("invalid types in fixed-point conversion");
3619 debug_generic_expr (lhs_type
);
3620 debug_generic_expr (rhs1_type
);
3629 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3630 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3631 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3633 error ("invalid types in conversion to floating point");
3634 debug_generic_expr (lhs_type
);
3635 debug_generic_expr (rhs1_type
);
3642 case FIX_TRUNC_EXPR
:
3644 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3645 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3646 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3648 error ("invalid types in conversion to integer");
3649 debug_generic_expr (lhs_type
);
3650 debug_generic_expr (rhs1_type
);
3656 case REDUC_MAX_EXPR
:
3657 case REDUC_MIN_EXPR
:
3658 case REDUC_PLUS_EXPR
:
3659 if (!VECTOR_TYPE_P (rhs1_type
)
3660 || !useless_type_conversion_p (lhs_type
, TREE_TYPE (rhs1_type
)))
3662 error ("reduction should convert from vector to element type");
3663 debug_generic_expr (lhs_type
);
3664 debug_generic_expr (rhs1_type
);
3669 case VEC_UNPACK_HI_EXPR
:
3670 case VEC_UNPACK_LO_EXPR
:
3671 case VEC_UNPACK_FLOAT_HI_EXPR
:
3672 case VEC_UNPACK_FLOAT_LO_EXPR
:
3687 /* For the remaining codes assert there is no conversion involved. */
3688 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3690 error ("non-trivial conversion in unary operation");
3691 debug_generic_expr (lhs_type
);
3692 debug_generic_expr (rhs1_type
);
3699 /* Verify a gimple assignment statement STMT with a binary rhs.
3700 Returns true if anything is wrong. */
3703 verify_gimple_assign_binary (gassign
*stmt
)
3705 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3706 tree lhs
= gimple_assign_lhs (stmt
);
3707 tree lhs_type
= TREE_TYPE (lhs
);
3708 tree rhs1
= gimple_assign_rhs1 (stmt
);
3709 tree rhs1_type
= TREE_TYPE (rhs1
);
3710 tree rhs2
= gimple_assign_rhs2 (stmt
);
3711 tree rhs2_type
= TREE_TYPE (rhs2
);
3713 if (!is_gimple_reg (lhs
))
3715 error ("non-register as LHS of binary operation");
3719 if (!is_gimple_val (rhs1
)
3720 || !is_gimple_val (rhs2
))
3722 error ("invalid operands in binary operation");
3726 /* First handle operations that involve different types. */
3731 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3732 || !(INTEGRAL_TYPE_P (rhs1_type
)
3733 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3734 || !(INTEGRAL_TYPE_P (rhs2_type
)
3735 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3737 error ("type mismatch in complex expression");
3738 debug_generic_expr (lhs_type
);
3739 debug_generic_expr (rhs1_type
);
3740 debug_generic_expr (rhs2_type
);
3752 /* Shifts and rotates are ok on integral types, fixed point
3753 types and integer vector types. */
3754 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3755 && !FIXED_POINT_TYPE_P (rhs1_type
)
3756 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3757 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3758 || (!INTEGRAL_TYPE_P (rhs2_type
)
3759 /* Vector shifts of vectors are also ok. */
3760 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3761 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3762 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3763 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3764 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3766 error ("type mismatch in shift expression");
3767 debug_generic_expr (lhs_type
);
3768 debug_generic_expr (rhs1_type
);
3769 debug_generic_expr (rhs2_type
);
3776 case WIDEN_LSHIFT_EXPR
:
3778 if (!INTEGRAL_TYPE_P (lhs_type
)
3779 || !INTEGRAL_TYPE_P (rhs1_type
)
3780 || TREE_CODE (rhs2
) != INTEGER_CST
3781 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3783 error ("type mismatch in widening vector shift expression");
3784 debug_generic_expr (lhs_type
);
3785 debug_generic_expr (rhs1_type
);
3786 debug_generic_expr (rhs2_type
);
3793 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3794 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3796 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3797 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3798 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3799 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3800 || TREE_CODE (rhs2
) != INTEGER_CST
3801 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3802 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3804 error ("type mismatch in widening vector shift expression");
3805 debug_generic_expr (lhs_type
);
3806 debug_generic_expr (rhs1_type
);
3807 debug_generic_expr (rhs2_type
);
3817 tree lhs_etype
= lhs_type
;
3818 tree rhs1_etype
= rhs1_type
;
3819 tree rhs2_etype
= rhs2_type
;
3820 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3822 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3823 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3825 error ("invalid non-vector operands to vector valued plus");
3828 lhs_etype
= TREE_TYPE (lhs_type
);
3829 rhs1_etype
= TREE_TYPE (rhs1_type
);
3830 rhs2_etype
= TREE_TYPE (rhs2_type
);
3832 if (POINTER_TYPE_P (lhs_etype
)
3833 || POINTER_TYPE_P (rhs1_etype
)
3834 || POINTER_TYPE_P (rhs2_etype
))
3836 error ("invalid (pointer) operands to plus/minus");
3840 /* Continue with generic binary expression handling. */
3844 case POINTER_PLUS_EXPR
:
3846 if (!POINTER_TYPE_P (rhs1_type
)
3847 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3848 || !ptrofftype_p (rhs2_type
))
3850 error ("type mismatch in pointer plus expression");
3851 debug_generic_stmt (lhs_type
);
3852 debug_generic_stmt (rhs1_type
);
3853 debug_generic_stmt (rhs2_type
);
3860 case TRUTH_ANDIF_EXPR
:
3861 case TRUTH_ORIF_EXPR
:
3862 case TRUTH_AND_EXPR
:
3864 case TRUTH_XOR_EXPR
:
3874 case UNORDERED_EXPR
:
3882 /* Comparisons are also binary, but the result type is not
3883 connected to the operand types. */
3884 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
);
3886 case WIDEN_MULT_EXPR
:
3887 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3889 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3890 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3892 case WIDEN_SUM_EXPR
:
3893 case VEC_WIDEN_MULT_HI_EXPR
:
3894 case VEC_WIDEN_MULT_LO_EXPR
:
3895 case VEC_WIDEN_MULT_EVEN_EXPR
:
3896 case VEC_WIDEN_MULT_ODD_EXPR
:
3897 case VEC_PACK_TRUNC_EXPR
:
3898 case VEC_PACK_SAT_EXPR
:
3899 case VEC_PACK_FIX_TRUNC_EXPR
:
3904 case MULT_HIGHPART_EXPR
:
3905 case TRUNC_DIV_EXPR
:
3907 case FLOOR_DIV_EXPR
:
3908 case ROUND_DIV_EXPR
:
3909 case TRUNC_MOD_EXPR
:
3911 case FLOOR_MOD_EXPR
:
3912 case ROUND_MOD_EXPR
:
3914 case EXACT_DIV_EXPR
:
3920 /* Continue with generic binary expression handling. */
3927 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3928 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3930 error ("type mismatch in binary expression");
3931 debug_generic_stmt (lhs_type
);
3932 debug_generic_stmt (rhs1_type
);
3933 debug_generic_stmt (rhs2_type
);
3940 /* Verify a gimple assignment statement STMT with a ternary rhs.
3941 Returns true if anything is wrong. */
3944 verify_gimple_assign_ternary (gassign
*stmt
)
3946 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3947 tree lhs
= gimple_assign_lhs (stmt
);
3948 tree lhs_type
= TREE_TYPE (lhs
);
3949 tree rhs1
= gimple_assign_rhs1 (stmt
);
3950 tree rhs1_type
= TREE_TYPE (rhs1
);
3951 tree rhs2
= gimple_assign_rhs2 (stmt
);
3952 tree rhs2_type
= TREE_TYPE (rhs2
);
3953 tree rhs3
= gimple_assign_rhs3 (stmt
);
3954 tree rhs3_type
= TREE_TYPE (rhs3
);
3956 if (!is_gimple_reg (lhs
))
3958 error ("non-register as LHS of ternary operation");
3962 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
3963 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
3964 || !is_gimple_val (rhs2
)
3965 || !is_gimple_val (rhs3
))
3967 error ("invalid operands in ternary operation");
3971 /* First handle operations that involve different types. */
3974 case WIDEN_MULT_PLUS_EXPR
:
3975 case WIDEN_MULT_MINUS_EXPR
:
3976 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3977 && !FIXED_POINT_TYPE_P (rhs1_type
))
3978 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
3979 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
3980 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
3981 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
3983 error ("type mismatch in widening multiply-accumulate expression");
3984 debug_generic_expr (lhs_type
);
3985 debug_generic_expr (rhs1_type
);
3986 debug_generic_expr (rhs2_type
);
3987 debug_generic_expr (rhs3_type
);
3993 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3994 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
3995 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3997 error ("type mismatch in fused multiply-add expression");
3998 debug_generic_expr (lhs_type
);
3999 debug_generic_expr (rhs1_type
);
4000 debug_generic_expr (rhs2_type
);
4001 debug_generic_expr (rhs3_type
);
4008 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
4009 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4011 error ("type mismatch in conditional expression");
4012 debug_generic_expr (lhs_type
);
4013 debug_generic_expr (rhs2_type
);
4014 debug_generic_expr (rhs3_type
);
4020 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4021 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4023 error ("type mismatch in vector permute expression");
4024 debug_generic_expr (lhs_type
);
4025 debug_generic_expr (rhs1_type
);
4026 debug_generic_expr (rhs2_type
);
4027 debug_generic_expr (rhs3_type
);
4031 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4032 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4033 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4035 error ("vector types expected in vector permute expression");
4036 debug_generic_expr (lhs_type
);
4037 debug_generic_expr (rhs1_type
);
4038 debug_generic_expr (rhs2_type
);
4039 debug_generic_expr (rhs3_type
);
4043 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
4044 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
4045 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
4046 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
4047 != TYPE_VECTOR_SUBPARTS (lhs_type
))
4049 error ("vectors with different element number found "
4050 "in vector permute expression");
4051 debug_generic_expr (lhs_type
);
4052 debug_generic_expr (rhs1_type
);
4053 debug_generic_expr (rhs2_type
);
4054 debug_generic_expr (rhs3_type
);
4058 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
4059 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
4060 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
4062 error ("invalid mask type in vector permute expression");
4063 debug_generic_expr (lhs_type
);
4064 debug_generic_expr (rhs1_type
);
4065 debug_generic_expr (rhs2_type
);
4066 debug_generic_expr (rhs3_type
);
4073 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
)
4074 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4075 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4076 (TYPE_MODE (TREE_TYPE (rhs1_type
))))
4077 > GET_MODE_BITSIZE (GET_MODE_INNER
4078 (TYPE_MODE (TREE_TYPE (lhs_type
)))))
4080 error ("type mismatch in sad expression");
4081 debug_generic_expr (lhs_type
);
4082 debug_generic_expr (rhs1_type
);
4083 debug_generic_expr (rhs2_type
);
4084 debug_generic_expr (rhs3_type
);
4088 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4089 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4090 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4092 error ("vector types expected in sad expression");
4093 debug_generic_expr (lhs_type
);
4094 debug_generic_expr (rhs1_type
);
4095 debug_generic_expr (rhs2_type
);
4096 debug_generic_expr (rhs3_type
);
4103 case REALIGN_LOAD_EXPR
:
4113 /* Verify a gimple assignment statement STMT with a single rhs.
4114 Returns true if anything is wrong. */
4117 verify_gimple_assign_single (gassign
*stmt
)
4119 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4120 tree lhs
= gimple_assign_lhs (stmt
);
4121 tree lhs_type
= TREE_TYPE (lhs
);
4122 tree rhs1
= gimple_assign_rhs1 (stmt
);
4123 tree rhs1_type
= TREE_TYPE (rhs1
);
4126 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4128 error ("non-trivial conversion at assignment");
4129 debug_generic_expr (lhs_type
);
4130 debug_generic_expr (rhs1_type
);
4134 if (gimple_clobber_p (stmt
)
4135 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4137 error ("non-decl/MEM_REF LHS in clobber statement");
4138 debug_generic_expr (lhs
);
4142 if (handled_component_p (lhs
)
4143 || TREE_CODE (lhs
) == MEM_REF
4144 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4145 res
|= verify_types_in_gimple_reference (lhs
, true);
4147 /* Special codes we cannot handle via their class. */
4152 tree op
= TREE_OPERAND (rhs1
, 0);
4153 if (!is_gimple_addressable (op
))
4155 error ("invalid operand in unary expression");
4159 /* Technically there is no longer a need for matching types, but
4160 gimple hygiene asks for this check. In LTO we can end up
4161 combining incompatible units and thus end up with addresses
4162 of globals that change their type to a common one. */
4164 && !types_compatible_p (TREE_TYPE (op
),
4165 TREE_TYPE (TREE_TYPE (rhs1
)))
4166 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4169 error ("type mismatch in address expression");
4170 debug_generic_stmt (TREE_TYPE (rhs1
));
4171 debug_generic_stmt (TREE_TYPE (op
));
4175 return verify_types_in_gimple_reference (op
, true);
4180 error ("INDIRECT_REF in gimple IL");
4186 case ARRAY_RANGE_REF
:
4187 case VIEW_CONVERT_EXPR
:
4190 case TARGET_MEM_REF
:
4192 if (!is_gimple_reg (lhs
)
4193 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4195 error ("invalid rhs for gimple memory store");
4196 debug_generic_stmt (lhs
);
4197 debug_generic_stmt (rhs1
);
4200 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4212 /* tcc_declaration */
4217 if (!is_gimple_reg (lhs
)
4218 && !is_gimple_reg (rhs1
)
4219 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4221 error ("invalid rhs for gimple memory store");
4222 debug_generic_stmt (lhs
);
4223 debug_generic_stmt (rhs1
);
4229 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
4232 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4234 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4236 /* For vector CONSTRUCTORs we require that either it is empty
4237 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4238 (then the element count must be correct to cover the whole
4239 outer vector and index must be NULL on all elements, or it is
4240 a CONSTRUCTOR of scalar elements, where we as an exception allow
4241 smaller number of elements (assuming zero filling) and
4242 consecutive indexes as compared to NULL indexes (such
4243 CONSTRUCTORs can appear in the IL from FEs). */
4244 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4246 if (elt_t
== NULL_TREE
)
4248 elt_t
= TREE_TYPE (elt_v
);
4249 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4251 tree elt_t
= TREE_TYPE (elt_v
);
4252 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4255 error ("incorrect type of vector CONSTRUCTOR"
4257 debug_generic_stmt (rhs1
);
4260 else if (CONSTRUCTOR_NELTS (rhs1
)
4261 * TYPE_VECTOR_SUBPARTS (elt_t
)
4262 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4264 error ("incorrect number of vector CONSTRUCTOR"
4266 debug_generic_stmt (rhs1
);
4270 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4273 error ("incorrect type of vector CONSTRUCTOR elements");
4274 debug_generic_stmt (rhs1
);
4277 else if (CONSTRUCTOR_NELTS (rhs1
)
4278 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4280 error ("incorrect number of vector CONSTRUCTOR elements");
4281 debug_generic_stmt (rhs1
);
4285 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4287 error ("incorrect type of vector CONSTRUCTOR elements");
4288 debug_generic_stmt (rhs1
);
4291 if (elt_i
!= NULL_TREE
4292 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4293 || TREE_CODE (elt_i
) != INTEGER_CST
4294 || compare_tree_int (elt_i
, i
) != 0))
4296 error ("vector CONSTRUCTOR with non-NULL element index");
4297 debug_generic_stmt (rhs1
);
4300 if (!is_gimple_val (elt_v
))
4302 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4303 debug_generic_stmt (rhs1
);
4308 else if (CONSTRUCTOR_NELTS (rhs1
) != 0)
4310 error ("non-vector CONSTRUCTOR with elements");
4311 debug_generic_stmt (rhs1
);
4317 case WITH_SIZE_EXPR
:
4327 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4328 is a problem, otherwise false. */
4331 verify_gimple_assign (gassign
*stmt
)
4333 switch (gimple_assign_rhs_class (stmt
))
4335 case GIMPLE_SINGLE_RHS
:
4336 return verify_gimple_assign_single (stmt
);
4338 case GIMPLE_UNARY_RHS
:
4339 return verify_gimple_assign_unary (stmt
);
4341 case GIMPLE_BINARY_RHS
:
4342 return verify_gimple_assign_binary (stmt
);
4344 case GIMPLE_TERNARY_RHS
:
4345 return verify_gimple_assign_ternary (stmt
);
4352 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4353 is a problem, otherwise false. */
4356 verify_gimple_return (greturn
*stmt
)
4358 tree op
= gimple_return_retval (stmt
);
4359 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4361 /* We cannot test for present return values as we do not fix up missing
4362 return values from the original source. */
4366 if (!is_gimple_val (op
)
4367 && TREE_CODE (op
) != RESULT_DECL
)
4369 error ("invalid operand in return statement");
4370 debug_generic_stmt (op
);
4374 if ((TREE_CODE (op
) == RESULT_DECL
4375 && DECL_BY_REFERENCE (op
))
4376 || (TREE_CODE (op
) == SSA_NAME
4377 && SSA_NAME_VAR (op
)
4378 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4379 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4380 op
= TREE_TYPE (op
);
4382 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4384 error ("invalid conversion in return statement");
4385 debug_generic_stmt (restype
);
4386 debug_generic_stmt (TREE_TYPE (op
));
4394 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4395 is a problem, otherwise false. */
4398 verify_gimple_goto (ggoto
*stmt
)
4400 tree dest
= gimple_goto_dest (stmt
);
4402 /* ??? We have two canonical forms of direct goto destinations, a
4403 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4404 if (TREE_CODE (dest
) != LABEL_DECL
4405 && (!is_gimple_val (dest
)
4406 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4408 error ("goto destination is neither a label nor a pointer");
4415 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4416 is a problem, otherwise false. */
4419 verify_gimple_switch (gswitch
*stmt
)
4422 tree elt
, prev_upper_bound
= NULL_TREE
;
4423 tree index_type
, elt_type
= NULL_TREE
;
4425 if (!is_gimple_val (gimple_switch_index (stmt
)))
4427 error ("invalid operand to switch statement");
4428 debug_generic_stmt (gimple_switch_index (stmt
));
4432 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4433 if (! INTEGRAL_TYPE_P (index_type
))
4435 error ("non-integral type switch statement");
4436 debug_generic_expr (index_type
);
4440 elt
= gimple_switch_label (stmt
, 0);
4441 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4443 error ("invalid default case label in switch statement");
4444 debug_generic_expr (elt
);
4448 n
= gimple_switch_num_labels (stmt
);
4449 for (i
= 1; i
< n
; i
++)
4451 elt
= gimple_switch_label (stmt
, i
);
4453 if (! CASE_LOW (elt
))
4455 error ("invalid case label in switch statement");
4456 debug_generic_expr (elt
);
4460 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4462 error ("invalid case range in switch statement");
4463 debug_generic_expr (elt
);
4469 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4470 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4472 error ("type mismatch for case label in switch statement");
4473 debug_generic_expr (elt
);
4479 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4480 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4482 error ("type precision mismatch in switch statement");
4487 if (prev_upper_bound
)
4489 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4491 error ("case labels not sorted in switch statement");
4496 prev_upper_bound
= CASE_HIGH (elt
);
4497 if (! prev_upper_bound
)
4498 prev_upper_bound
= CASE_LOW (elt
);
4504 /* Verify a gimple debug statement STMT.
4505 Returns true if anything is wrong. */
4508 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED
)
4510 /* There isn't much that could be wrong in a gimple debug stmt. A
4511 gimple debug bind stmt, for example, maps a tree, that's usually
4512 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4513 component or member of an aggregate type, to another tree, that
4514 can be an arbitrary expression. These stmts expand into debug
4515 insns, and are converted to debug notes by var-tracking.c. */
4519 /* Verify a gimple label statement STMT.
4520 Returns true if anything is wrong. */
4523 verify_gimple_label (glabel
*stmt
)
4525 tree decl
= gimple_label_label (stmt
);
4529 if (TREE_CODE (decl
) != LABEL_DECL
)
4531 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4532 && DECL_CONTEXT (decl
) != current_function_decl
)
4534 error ("label's context is not the current function decl");
4538 uid
= LABEL_DECL_UID (decl
);
4541 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4543 error ("incorrect entry in label_to_block_map");
4547 uid
= EH_LANDING_PAD_NR (decl
);
4550 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4551 if (decl
!= lp
->post_landing_pad
)
4553 error ("incorrect setting of landing pad number");
4561 /* Verify a gimple cond statement STMT.
4562 Returns true if anything is wrong. */
4565 verify_gimple_cond (gcond
*stmt
)
4567 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4569 error ("invalid comparison code in gimple cond");
4572 if (!(!gimple_cond_true_label (stmt
)
4573 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4574 || !(!gimple_cond_false_label (stmt
)
4575 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4577 error ("invalid labels in gimple cond");
4581 return verify_gimple_comparison (boolean_type_node
,
4582 gimple_cond_lhs (stmt
),
4583 gimple_cond_rhs (stmt
));
4586 /* Verify the GIMPLE statement STMT. Returns true if there is an
4587 error, otherwise false. */
4590 verify_gimple_stmt (gimple stmt
)
4592 switch (gimple_code (stmt
))
4595 return verify_gimple_assign (as_a
<gassign
*> (stmt
));
4598 return verify_gimple_label (as_a
<glabel
*> (stmt
));
4601 return verify_gimple_call (as_a
<gcall
*> (stmt
));
4604 return verify_gimple_cond (as_a
<gcond
*> (stmt
));
4607 return verify_gimple_goto (as_a
<ggoto
*> (stmt
));
4610 return verify_gimple_switch (as_a
<gswitch
*> (stmt
));
4613 return verify_gimple_return (as_a
<greturn
*> (stmt
));
4618 case GIMPLE_TRANSACTION
:
4619 return verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
4621 /* Tuples that do not have tree operands. */
4623 case GIMPLE_PREDICT
:
4625 case GIMPLE_EH_DISPATCH
:
4626 case GIMPLE_EH_MUST_NOT_THROW
:
4630 /* OpenMP directives are validated by the FE and never operated
4631 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4632 non-gimple expressions when the main index variable has had
4633 its address taken. This does not affect the loop itself
4634 because the header of an GIMPLE_OMP_FOR is merely used to determine
4635 how to setup the parallel iteration. */
4639 return verify_gimple_debug (stmt
);
4646 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4647 and false otherwise. */
4650 verify_gimple_phi (gimple phi
)
4654 tree phi_result
= gimple_phi_result (phi
);
4659 error ("invalid PHI result");
4663 virtual_p
= virtual_operand_p (phi_result
);
4664 if (TREE_CODE (phi_result
) != SSA_NAME
4666 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4668 error ("invalid PHI result");
4672 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4674 tree t
= gimple_phi_arg_def (phi
, i
);
4678 error ("missing PHI def");
4682 /* Addressable variables do have SSA_NAMEs but they
4683 are not considered gimple values. */
4684 else if ((TREE_CODE (t
) == SSA_NAME
4685 && virtual_p
!= virtual_operand_p (t
))
4687 && (TREE_CODE (t
) != SSA_NAME
4688 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4690 && !is_gimple_val (t
)))
4692 error ("invalid PHI argument");
4693 debug_generic_expr (t
);
4696 #ifdef ENABLE_TYPES_CHECKING
4697 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4699 error ("incompatible types in PHI argument %u", i
);
4700 debug_generic_stmt (TREE_TYPE (phi_result
));
4701 debug_generic_stmt (TREE_TYPE (t
));
4710 /* Verify the GIMPLE statements inside the sequence STMTS. */
4713 verify_gimple_in_seq_2 (gimple_seq stmts
)
4715 gimple_stmt_iterator ittr
;
4718 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4720 gimple stmt
= gsi_stmt (ittr
);
4722 switch (gimple_code (stmt
))
4725 err
|= verify_gimple_in_seq_2 (
4726 gimple_bind_body (as_a
<gbind
*> (stmt
)));
4730 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4731 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4734 case GIMPLE_EH_FILTER
:
4735 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4738 case GIMPLE_EH_ELSE
:
4740 geh_else
*eh_else
= as_a
<geh_else
*> (stmt
);
4741 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else
));
4742 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else
));
4747 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (
4748 as_a
<gcatch
*> (stmt
)));
4751 case GIMPLE_TRANSACTION
:
4752 err
|= verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
4757 bool err2
= verify_gimple_stmt (stmt
);
4759 debug_gimple_stmt (stmt
);
4768 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4769 is a problem, otherwise false. */
4772 verify_gimple_transaction (gtransaction
*stmt
)
4774 tree lab
= gimple_transaction_label (stmt
);
4775 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4777 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
4781 /* Verify the GIMPLE statements inside the statement list STMTS. */
4784 verify_gimple_in_seq (gimple_seq stmts
)
4786 timevar_push (TV_TREE_STMT_VERIFY
);
4787 if (verify_gimple_in_seq_2 (stmts
))
4788 internal_error ("verify_gimple failed");
4789 timevar_pop (TV_TREE_STMT_VERIFY
);
4792 /* Return true when the T can be shared. */
4795 tree_node_can_be_shared (tree t
)
4797 if (IS_TYPE_OR_DECL_P (t
)
4798 || is_gimple_min_invariant (t
)
4799 || TREE_CODE (t
) == SSA_NAME
4800 || t
== error_mark_node
4801 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4804 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4813 /* Called via walk_tree. Verify tree sharing. */
4816 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4818 hash_set
<void *> *visited
= (hash_set
<void *> *) data
;
4820 if (tree_node_can_be_shared (*tp
))
4822 *walk_subtrees
= false;
4826 if (visited
->add (*tp
))
4832 /* Called via walk_gimple_stmt. Verify tree sharing. */
4835 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
4837 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4838 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
4841 static bool eh_error_found
;
4843 verify_eh_throw_stmt_node (const gimple
&stmt
, const int &,
4844 hash_set
<gimple
> *visited
)
4846 if (!visited
->contains (stmt
))
4848 error ("dead STMT in EH table");
4849 debug_gimple_stmt (stmt
);
4850 eh_error_found
= true;
4855 /* Verify if the location LOCs block is in BLOCKS. */
4858 verify_location (hash_set
<tree
> *blocks
, location_t loc
)
4860 tree block
= LOCATION_BLOCK (loc
);
4861 if (block
!= NULL_TREE
4862 && !blocks
->contains (block
))
4864 error ("location references block not in block tree");
4867 if (block
!= NULL_TREE
)
4868 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
4872 /* Called via walk_tree. Verify that expressions have no blocks. */
4875 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
4879 *walk_subtrees
= false;
4883 location_t loc
= EXPR_LOCATION (*tp
);
4884 if (LOCATION_BLOCK (loc
) != NULL
)
4890 /* Called via walk_tree. Verify locations of expressions. */
4893 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4895 hash_set
<tree
> *blocks
= (hash_set
<tree
> *) data
;
4897 if (TREE_CODE (*tp
) == VAR_DECL
4898 && DECL_HAS_DEBUG_EXPR_P (*tp
))
4900 tree t
= DECL_DEBUG_EXPR (*tp
);
4901 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4905 if ((TREE_CODE (*tp
) == VAR_DECL
4906 || TREE_CODE (*tp
) == PARM_DECL
4907 || TREE_CODE (*tp
) == RESULT_DECL
)
4908 && DECL_HAS_VALUE_EXPR_P (*tp
))
4910 tree t
= DECL_VALUE_EXPR (*tp
);
4911 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4918 *walk_subtrees
= false;
4922 location_t loc
= EXPR_LOCATION (*tp
);
4923 if (verify_location (blocks
, loc
))
4929 /* Called via walk_gimple_op. Verify locations of expressions. */
4932 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
4934 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4935 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
4938 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4941 collect_subblocks (hash_set
<tree
> *blocks
, tree block
)
4944 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
4947 collect_subblocks (blocks
, t
);
4951 /* Verify the GIMPLE statements in the CFG of FN. */
4954 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
)
4959 timevar_push (TV_TREE_STMT_VERIFY
);
4960 hash_set
<void *> visited
;
4961 hash_set
<gimple
> visited_stmts
;
4963 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4964 hash_set
<tree
> blocks
;
4965 if (DECL_INITIAL (fn
->decl
))
4967 blocks
.add (DECL_INITIAL (fn
->decl
));
4968 collect_subblocks (&blocks
, DECL_INITIAL (fn
->decl
));
4971 FOR_EACH_BB_FN (bb
, fn
)
4973 gimple_stmt_iterator gsi
;
4975 for (gphi_iterator gpi
= gsi_start_phis (bb
);
4979 gphi
*phi
= gpi
.phi ();
4983 visited_stmts
.add (phi
);
4985 if (gimple_bb (phi
) != bb
)
4987 error ("gimple_bb (phi) is set to a wrong basic block");
4991 err2
|= verify_gimple_phi (phi
);
4993 /* Only PHI arguments have locations. */
4994 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
4996 error ("PHI node with location");
5000 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5002 tree arg
= gimple_phi_arg_def (phi
, i
);
5003 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
5007 error ("incorrect sharing of tree nodes");
5008 debug_generic_expr (addr
);
5011 location_t loc
= gimple_phi_arg_location (phi
, i
);
5012 if (virtual_operand_p (gimple_phi_result (phi
))
5013 && loc
!= UNKNOWN_LOCATION
)
5015 error ("virtual PHI with argument locations");
5018 addr
= walk_tree (&arg
, verify_expr_location_1
, &blocks
, NULL
);
5021 debug_generic_expr (addr
);
5024 err2
|= verify_location (&blocks
, loc
);
5028 debug_gimple_stmt (phi
);
5032 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5034 gimple stmt
= gsi_stmt (gsi
);
5036 struct walk_stmt_info wi
;
5040 visited_stmts
.add (stmt
);
5042 if (gimple_bb (stmt
) != bb
)
5044 error ("gimple_bb (stmt) is set to a wrong basic block");
5048 err2
|= verify_gimple_stmt (stmt
);
5049 err2
|= verify_location (&blocks
, gimple_location (stmt
));
5051 memset (&wi
, 0, sizeof (wi
));
5052 wi
.info
= (void *) &visited
;
5053 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
5056 error ("incorrect sharing of tree nodes");
5057 debug_generic_expr (addr
);
5061 memset (&wi
, 0, sizeof (wi
));
5062 wi
.info
= (void *) &blocks
;
5063 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
5066 debug_generic_expr (addr
);
5070 /* ??? Instead of not checking these stmts at all the walker
5071 should know its context via wi. */
5072 if (!is_gimple_debug (stmt
)
5073 && !is_gimple_omp (stmt
))
5075 memset (&wi
, 0, sizeof (wi
));
5076 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
5079 debug_generic_expr (addr
);
5080 inform (gimple_location (stmt
), "in statement");
5085 /* If the statement is marked as part of an EH region, then it is
5086 expected that the statement could throw. Verify that when we
5087 have optimizations that simplify statements such that we prove
5088 that they cannot throw, that we update other data structures
5090 lp_nr
= lookup_stmt_eh_lp (stmt
);
5093 if (!stmt_could_throw_p (stmt
))
5097 error ("statement marked for throw, but doesn%'t");
5101 else if (!gsi_one_before_end_p (gsi
))
5103 error ("statement marked for throw in middle of block");
5109 debug_gimple_stmt (stmt
);
5114 eh_error_found
= false;
5115 hash_map
<gimple
, int> *eh_table
= get_eh_throw_stmt_table (cfun
);
5117 eh_table
->traverse
<hash_set
<gimple
> *, verify_eh_throw_stmt_node
>
5120 if (err
|| eh_error_found
)
5121 internal_error ("verify_gimple failed");
5123 verify_histograms ();
5124 timevar_pop (TV_TREE_STMT_VERIFY
);
5128 /* Verifies that the flow information is OK. */
5131 gimple_verify_flow_info (void)
5135 gimple_stmt_iterator gsi
;
5140 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5141 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5143 error ("ENTRY_BLOCK has IL associated with it");
5147 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5148 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5150 error ("EXIT_BLOCK has IL associated with it");
5154 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5155 if (e
->flags
& EDGE_FALLTHRU
)
5157 error ("fallthru to exit from bb %d", e
->src
->index
);
5161 FOR_EACH_BB_FN (bb
, cfun
)
5163 bool found_ctrl_stmt
= false;
5167 /* Skip labels on the start of basic block. */
5168 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5171 gimple prev_stmt
= stmt
;
5173 stmt
= gsi_stmt (gsi
);
5175 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5178 label
= gimple_label_label (as_a
<glabel
*> (stmt
));
5179 if (prev_stmt
&& DECL_NONLOCAL (label
))
5181 error ("nonlocal label ");
5182 print_generic_expr (stderr
, label
, 0);
5183 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5188 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5190 error ("EH landing pad label ");
5191 print_generic_expr (stderr
, label
, 0);
5192 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5197 if (label_to_block (label
) != bb
)
5200 print_generic_expr (stderr
, label
, 0);
5201 fprintf (stderr
, " to block does not match in bb %d",
5206 if (decl_function_context (label
) != current_function_decl
)
5209 print_generic_expr (stderr
, label
, 0);
5210 fprintf (stderr
, " has incorrect context in bb %d",
5216 /* Verify that body of basic block BB is free of control flow. */
5217 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5219 gimple stmt
= gsi_stmt (gsi
);
5221 if (found_ctrl_stmt
)
5223 error ("control flow in the middle of basic block %d",
5228 if (stmt_ends_bb_p (stmt
))
5229 found_ctrl_stmt
= true;
5231 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
5234 print_generic_expr (stderr
, gimple_label_label (label_stmt
), 0);
5235 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
5240 gsi
= gsi_last_bb (bb
);
5241 if (gsi_end_p (gsi
))
5244 stmt
= gsi_stmt (gsi
);
5246 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5249 err
|= verify_eh_edges (stmt
);
5251 if (is_ctrl_stmt (stmt
))
5253 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5254 if (e
->flags
& EDGE_FALLTHRU
)
5256 error ("fallthru edge after a control statement in bb %d",
5262 if (gimple_code (stmt
) != GIMPLE_COND
)
5264 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5265 after anything else but if statement. */
5266 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5267 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5269 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5275 switch (gimple_code (stmt
))
5282 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5286 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5287 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5288 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5289 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5290 || EDGE_COUNT (bb
->succs
) >= 3)
5292 error ("wrong outgoing edge flags at end of bb %d",
5300 if (simple_goto_p (stmt
))
5302 error ("explicit goto at end of bb %d", bb
->index
);
5307 /* FIXME. We should double check that the labels in the
5308 destination blocks have their address taken. */
5309 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5310 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5311 | EDGE_FALSE_VALUE
))
5312 || !(e
->flags
& EDGE_ABNORMAL
))
5314 error ("wrong outgoing edge flags at end of bb %d",
5322 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5324 /* ... fallthru ... */
5326 if (!single_succ_p (bb
)
5327 || (single_succ_edge (bb
)->flags
5328 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5329 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5331 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5334 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5336 error ("return edge does not point to exit in bb %d",
5344 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5349 n
= gimple_switch_num_labels (switch_stmt
);
5351 /* Mark all the destination basic blocks. */
5352 for (i
= 0; i
< n
; ++i
)
5354 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5355 basic_block label_bb
= label_to_block (lab
);
5356 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5357 label_bb
->aux
= (void *)1;
5360 /* Verify that the case labels are sorted. */
5361 prev
= gimple_switch_label (switch_stmt
, 0);
5362 for (i
= 1; i
< n
; ++i
)
5364 tree c
= gimple_switch_label (switch_stmt
, i
);
5367 error ("found default case not at the start of "
5373 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5375 error ("case labels not sorted: ");
5376 print_generic_expr (stderr
, prev
, 0);
5377 fprintf (stderr
," is greater than ");
5378 print_generic_expr (stderr
, c
, 0);
5379 fprintf (stderr
," but comes before it.\n");
5384 /* VRP will remove the default case if it can prove it will
5385 never be executed. So do not verify there always exists
5386 a default case here. */
5388 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5392 error ("extra outgoing edge %d->%d",
5393 bb
->index
, e
->dest
->index
);
5397 e
->dest
->aux
= (void *)2;
5398 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5399 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5401 error ("wrong outgoing edge flags at end of bb %d",
5407 /* Check that we have all of them. */
5408 for (i
= 0; i
< n
; ++i
)
5410 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5411 basic_block label_bb
= label_to_block (lab
);
5413 if (label_bb
->aux
!= (void *)2)
5415 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5420 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5421 e
->dest
->aux
= (void *)0;
5425 case GIMPLE_EH_DISPATCH
:
5426 err
|= verify_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
));
5434 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5435 verify_dominators (CDI_DOMINATORS
);
5441 /* Updates phi nodes after creating a forwarder block joined
5442 by edge FALLTHRU. */
5445 gimple_make_forwarder_block (edge fallthru
)
5449 basic_block dummy
, bb
;
5453 dummy
= fallthru
->src
;
5454 bb
= fallthru
->dest
;
5456 if (single_pred_p (bb
))
5459 /* If we redirected a branch we must create new PHI nodes at the
5461 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5463 gphi
*phi
, *new_phi
;
5466 var
= gimple_phi_result (phi
);
5467 new_phi
= create_phi_node (var
, bb
);
5468 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5469 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5473 /* Add the arguments we have stored on edges. */
5474 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5479 flush_pending_stmts (e
);
5484 /* Return a non-special label in the head of basic block BLOCK.
5485 Create one if it doesn't exist. */
5488 gimple_block_label (basic_block bb
)
5490 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5495 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5497 stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
5500 label
= gimple_label_label (stmt
);
5501 if (!DECL_NONLOCAL (label
))
5504 gsi_move_before (&i
, &s
);
5509 label
= create_artificial_label (UNKNOWN_LOCATION
);
5510 stmt
= gimple_build_label (label
);
5511 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5516 /* Attempt to perform edge redirection by replacing a possibly complex
5517 jump instruction by a goto or by removing the jump completely.
5518 This can apply only if all edges now point to the same block. The
5519 parameters and return values are equivalent to
5520 redirect_edge_and_branch. */
5523 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5525 basic_block src
= e
->src
;
5526 gimple_stmt_iterator i
;
5529 /* We can replace or remove a complex jump only when we have exactly
5531 if (EDGE_COUNT (src
->succs
) != 2
5532 /* Verify that all targets will be TARGET. Specifically, the
5533 edge that is not E must also go to TARGET. */
5534 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5537 i
= gsi_last_bb (src
);
5541 stmt
= gsi_stmt (i
);
5543 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5545 gsi_remove (&i
, true);
5546 e
= ssa_redirect_edge (e
, target
);
5547 e
->flags
= EDGE_FALLTHRU
;
5555 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5556 edge representing the redirected branch. */
5559 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5561 basic_block bb
= e
->src
;
5562 gimple_stmt_iterator gsi
;
5566 if (e
->flags
& EDGE_ABNORMAL
)
5569 if (e
->dest
== dest
)
5572 if (e
->flags
& EDGE_EH
)
5573 return redirect_eh_edge (e
, dest
);
5575 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5577 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5582 gsi
= gsi_last_bb (bb
);
5583 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5585 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5588 /* For COND_EXPR, we only need to redirect the edge. */
5592 /* No non-abnormal edges should lead from a non-simple goto, and
5593 simple ones should be represented implicitly. */
5598 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5599 tree label
= gimple_block_label (dest
);
5600 tree cases
= get_cases_for_edge (e
, switch_stmt
);
5602 /* If we have a list of cases associated with E, then use it
5603 as it's a lot faster than walking the entire case vector. */
5606 edge e2
= find_edge (e
->src
, dest
);
5613 CASE_LABEL (cases
) = label
;
5614 cases
= CASE_CHAIN (cases
);
5617 /* If there was already an edge in the CFG, then we need
5618 to move all the cases associated with E to E2. */
5621 tree cases2
= get_cases_for_edge (e2
, switch_stmt
);
5623 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5624 CASE_CHAIN (cases2
) = first
;
5626 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5630 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
5632 for (i
= 0; i
< n
; i
++)
5634 tree elt
= gimple_switch_label (switch_stmt
, i
);
5635 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5636 CASE_LABEL (elt
) = label
;
5644 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5645 int i
, n
= gimple_asm_nlabels (asm_stmt
);
5648 for (i
= 0; i
< n
; ++i
)
5650 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
5651 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5654 label
= gimple_block_label (dest
);
5655 TREE_VALUE (cons
) = label
;
5659 /* If we didn't find any label matching the former edge in the
5660 asm labels, we must be redirecting the fallthrough
5662 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5667 gsi_remove (&gsi
, true);
5668 e
->flags
|= EDGE_FALLTHRU
;
5671 case GIMPLE_OMP_RETURN
:
5672 case GIMPLE_OMP_CONTINUE
:
5673 case GIMPLE_OMP_SECTIONS_SWITCH
:
5674 case GIMPLE_OMP_FOR
:
5675 /* The edges from OMP constructs can be simply redirected. */
5678 case GIMPLE_EH_DISPATCH
:
5679 if (!(e
->flags
& EDGE_FALLTHRU
))
5680 redirect_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
), e
, dest
);
5683 case GIMPLE_TRANSACTION
:
5684 /* The ABORT edge has a stored label associated with it, otherwise
5685 the edges are simply redirectable. */
5687 gimple_transaction_set_label (as_a
<gtransaction
*> (stmt
),
5688 gimple_block_label (dest
));
5692 /* Otherwise it must be a fallthru edge, and we don't need to
5693 do anything besides redirecting it. */
5694 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5698 /* Update/insert PHI nodes as necessary. */
5700 /* Now update the edges in the CFG. */
5701 e
= ssa_redirect_edge (e
, dest
);
5706 /* Returns true if it is possible to remove edge E by redirecting
5707 it to the destination of the other edge from E->src. */
5710 gimple_can_remove_branch_p (const_edge e
)
5712 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
5718 /* Simple wrapper, as we can always redirect fallthru edges. */
5721 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5723 e
= gimple_redirect_edge_and_branch (e
, dest
);
5730 /* Splits basic block BB after statement STMT (but at least after the
5731 labels). If STMT is NULL, BB is split just after the labels. */
5734 gimple_split_block (basic_block bb
, void *stmt
)
5736 gimple_stmt_iterator gsi
;
5737 gimple_stmt_iterator gsi_tgt
;
5743 new_bb
= create_empty_bb (bb
);
5745 /* Redirect the outgoing edges. */
5746 new_bb
->succs
= bb
->succs
;
5748 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5751 /* Get a stmt iterator pointing to the first stmt to move. */
5752 if (!stmt
|| gimple_code ((gimple
) stmt
) == GIMPLE_LABEL
)
5753 gsi
= gsi_after_labels (bb
);
5756 gsi
= gsi_for_stmt ((gimple
) stmt
);
5760 /* Move everything from GSI to the new basic block. */
5761 if (gsi_end_p (gsi
))
5764 /* Split the statement list - avoid re-creating new containers as this
5765 brings ugly quadratic memory consumption in the inliner.
5766 (We are still quadratic since we need to update stmt BB pointers,
5768 gsi_split_seq_before (&gsi
, &list
);
5769 set_bb_seq (new_bb
, list
);
5770 for (gsi_tgt
= gsi_start (list
);
5771 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5772 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
5778 /* Moves basic block BB after block AFTER. */
5781 gimple_move_block_after (basic_block bb
, basic_block after
)
5783 if (bb
->prev_bb
== after
)
5787 link_block (bb
, after
);
5793 /* Return TRUE if block BB has no executable statements, otherwise return
5797 gimple_empty_block_p (basic_block bb
)
5799 /* BB must have no executable statements. */
5800 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5803 if (gsi_end_p (gsi
))
5805 if (is_gimple_debug (gsi_stmt (gsi
)))
5806 gsi_next_nondebug (&gsi
);
5807 return gsi_end_p (gsi
);
5811 /* Split a basic block if it ends with a conditional branch and if the
5812 other part of the block is not empty. */
5815 gimple_split_block_before_cond_jump (basic_block bb
)
5817 gimple last
, split_point
;
5818 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5819 if (gsi_end_p (gsi
))
5821 last
= gsi_stmt (gsi
);
5822 if (gimple_code (last
) != GIMPLE_COND
5823 && gimple_code (last
) != GIMPLE_SWITCH
)
5825 gsi_prev_nondebug (&gsi
);
5826 split_point
= gsi_stmt (gsi
);
5827 return split_block (bb
, split_point
)->dest
;
5831 /* Return true if basic_block can be duplicated. */
5834 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
5839 /* Create a duplicate of the basic block BB. NOTE: This does not
5840 preserve SSA form. */
5843 gimple_duplicate_bb (basic_block bb
)
5846 gimple_stmt_iterator gsi_tgt
;
5848 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
5850 /* Copy the PHI nodes. We ignore PHI node arguments here because
5851 the incoming edges have not been setup yet. */
5852 for (gphi_iterator gpi
= gsi_start_phis (bb
);
5858 copy
= create_phi_node (NULL_TREE
, new_bb
);
5859 create_new_def_for (gimple_phi_result (phi
), copy
,
5860 gimple_phi_result_ptr (copy
));
5861 gimple_set_uid (copy
, gimple_uid (phi
));
5864 gsi_tgt
= gsi_start_bb (new_bb
);
5865 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
5869 def_operand_p def_p
;
5870 ssa_op_iter op_iter
;
5874 stmt
= gsi_stmt (gsi
);
5875 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5878 /* Don't duplicate label debug stmts. */
5879 if (gimple_debug_bind_p (stmt
)
5880 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
5884 /* Create a new copy of STMT and duplicate STMT's virtual
5886 copy
= gimple_copy (stmt
);
5887 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
5889 maybe_duplicate_eh_stmt (copy
, stmt
);
5890 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5892 /* When copying around a stmt writing into a local non-user
5893 aggregate, make sure it won't share stack slot with other
5895 lhs
= gimple_get_lhs (stmt
);
5896 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
5898 tree base
= get_base_address (lhs
);
5900 && (TREE_CODE (base
) == VAR_DECL
5901 || TREE_CODE (base
) == RESULT_DECL
)
5902 && DECL_IGNORED_P (base
)
5903 && !TREE_STATIC (base
)
5904 && !DECL_EXTERNAL (base
)
5905 && (TREE_CODE (base
) != VAR_DECL
5906 || !DECL_HAS_VALUE_EXPR_P (base
)))
5907 DECL_NONSHAREABLE (base
) = 1;
5910 /* Create new names for all the definitions created by COPY and
5911 add replacement mappings for each new name. */
5912 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
5913 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
5919 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5922 add_phi_args_after_copy_edge (edge e_copy
)
5924 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
5927 gphi
*phi
, *phi_copy
;
5929 gphi_iterator psi
, psi_copy
;
5931 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
5934 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
5936 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
5937 dest
= get_bb_original (e_copy
->dest
);
5939 dest
= e_copy
->dest
;
5941 e
= find_edge (bb
, dest
);
5944 /* During loop unrolling the target of the latch edge is copied.
5945 In this case we are not looking for edge to dest, but to
5946 duplicated block whose original was dest. */
5947 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5949 if ((e
->dest
->flags
& BB_DUPLICATED
)
5950 && get_bb_original (e
->dest
) == dest
)
5954 gcc_assert (e
!= NULL
);
5957 for (psi
= gsi_start_phis (e
->dest
),
5958 psi_copy
= gsi_start_phis (e_copy
->dest
);
5960 gsi_next (&psi
), gsi_next (&psi_copy
))
5963 phi_copy
= psi_copy
.phi ();
5964 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5965 add_phi_arg (phi_copy
, def
, e_copy
,
5966 gimple_phi_arg_location_from_edge (phi
, e
));
5971 /* Basic block BB_COPY was created by code duplication. Add phi node
5972 arguments for edges going out of BB_COPY. The blocks that were
5973 duplicated have BB_DUPLICATED set. */
5976 add_phi_args_after_copy_bb (basic_block bb_copy
)
5981 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
5983 add_phi_args_after_copy_edge (e_copy
);
5987 /* Blocks in REGION_COPY array of length N_REGION were created by
5988 duplication of basic blocks. Add phi node arguments for edges
5989 going from these blocks. If E_COPY is not NULL, also add
5990 phi node arguments for its destination.*/
5993 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
5998 for (i
= 0; i
< n_region
; i
++)
5999 region_copy
[i
]->flags
|= BB_DUPLICATED
;
6001 for (i
= 0; i
< n_region
; i
++)
6002 add_phi_args_after_copy_bb (region_copy
[i
]);
6004 add_phi_args_after_copy_edge (e_copy
);
6006 for (i
= 0; i
< n_region
; i
++)
6007 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
6010 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6011 important exit edge EXIT. By important we mean that no SSA name defined
6012 inside region is live over the other exit edges of the region. All entry
6013 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6014 to the duplicate of the region. Dominance and loop information is
6015 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6016 UPDATE_DOMINANCE is false then we assume that the caller will update the
6017 dominance information after calling this function. The new basic
6018 blocks are stored to REGION_COPY in the same order as they had in REGION,
6019 provided that REGION_COPY is not NULL.
6020 The function returns false if it is unable to copy the region,
6024 gimple_duplicate_sese_region (edge entry
, edge exit
,
6025 basic_block
*region
, unsigned n_region
,
6026 basic_block
*region_copy
,
6027 bool update_dominance
)
6030 bool free_region_copy
= false, copying_header
= false;
6031 struct loop
*loop
= entry
->dest
->loop_father
;
6033 vec
<basic_block
> doms
;
6035 int total_freq
= 0, entry_freq
= 0;
6036 gcov_type total_count
= 0, entry_count
= 0;
6038 if (!can_copy_bbs_p (region
, n_region
))
6041 /* Some sanity checking. Note that we do not check for all possible
6042 missuses of the functions. I.e. if you ask to copy something weird,
6043 it will work, but the state of structures probably will not be
6045 for (i
= 0; i
< n_region
; i
++)
6047 /* We do not handle subloops, i.e. all the blocks must belong to the
6049 if (region
[i
]->loop_father
!= loop
)
6052 if (region
[i
] != entry
->dest
6053 && region
[i
] == loop
->header
)
6057 /* In case the function is used for loop header copying (which is the primary
6058 use), ensure that EXIT and its copy will be new latch and entry edges. */
6059 if (loop
->header
== entry
->dest
)
6061 copying_header
= true;
6063 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
6066 for (i
= 0; i
< n_region
; i
++)
6067 if (region
[i
] != exit
->src
6068 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
6072 initialize_original_copy_tables ();
6075 set_loop_copy (loop
, loop_outer (loop
));
6077 set_loop_copy (loop
, loop
);
6081 region_copy
= XNEWVEC (basic_block
, n_region
);
6082 free_region_copy
= true;
6085 /* Record blocks outside the region that are dominated by something
6087 if (update_dominance
)
6090 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6093 if (entry
->dest
->count
)
6095 total_count
= entry
->dest
->count
;
6096 entry_count
= entry
->count
;
6097 /* Fix up corner cases, to avoid division by zero or creation of negative
6099 if (entry_count
> total_count
)
6100 entry_count
= total_count
;
6104 total_freq
= entry
->dest
->frequency
;
6105 entry_freq
= EDGE_FREQUENCY (entry
);
6106 /* Fix up corner cases, to avoid division by zero or creation of negative
6108 if (total_freq
== 0)
6110 else if (entry_freq
> total_freq
)
6111 entry_freq
= total_freq
;
6114 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
6115 split_edge_bb_loc (entry
), update_dominance
);
6118 scale_bbs_frequencies_gcov_type (region
, n_region
,
6119 total_count
- entry_count
,
6121 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
6126 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
6128 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
6133 loop
->header
= exit
->dest
;
6134 loop
->latch
= exit
->src
;
6137 /* Redirect the entry and add the phi node arguments. */
6138 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
6139 gcc_assert (redirected
!= NULL
);
6140 flush_pending_stmts (entry
);
6142 /* Concerning updating of dominators: We must recount dominators
6143 for entry block and its copy. Anything that is outside of the
6144 region, but was dominated by something inside needs recounting as
6146 if (update_dominance
)
6148 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6149 doms
.safe_push (get_bb_original (entry
->dest
));
6150 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6154 /* Add the other PHI node arguments. */
6155 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6157 if (free_region_copy
)
6160 free_original_copy_tables ();
6164 /* Checks if BB is part of the region defined by N_REGION BBS. */
6166 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6170 for (n
= 0; n
< n_region
; n
++)
6178 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6179 are stored to REGION_COPY in the same order in that they appear
6180 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6181 the region, EXIT an exit from it. The condition guarding EXIT
6182 is moved to ENTRY. Returns true if duplication succeeds, false
6208 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
6209 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
6210 basic_block
*region_copy ATTRIBUTE_UNUSED
)
6213 bool free_region_copy
= false;
6214 struct loop
*loop
= exit
->dest
->loop_father
;
6215 struct loop
*orig_loop
= entry
->dest
->loop_father
;
6216 basic_block switch_bb
, entry_bb
, nentry_bb
;
6217 vec
<basic_block
> doms
;
6218 int total_freq
= 0, exit_freq
= 0;
6219 gcov_type total_count
= 0, exit_count
= 0;
6220 edge exits
[2], nexits
[2], e
;
6221 gimple_stmt_iterator gsi
;
6224 basic_block exit_bb
;
6228 struct loop
*target
, *aloop
, *cloop
;
6230 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6232 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6234 if (!can_copy_bbs_p (region
, n_region
))
6237 initialize_original_copy_tables ();
6238 set_loop_copy (orig_loop
, loop
);
6241 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6243 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6245 cloop
= duplicate_loop (aloop
, target
);
6246 duplicate_subloops (aloop
, cloop
);
6252 region_copy
= XNEWVEC (basic_block
, n_region
);
6253 free_region_copy
= true;
6256 gcc_assert (!need_ssa_update_p (cfun
));
6258 /* Record blocks outside the region that are dominated by something
6260 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6262 if (exit
->src
->count
)
6264 total_count
= exit
->src
->count
;
6265 exit_count
= exit
->count
;
6266 /* Fix up corner cases, to avoid division by zero or creation of negative
6268 if (exit_count
> total_count
)
6269 exit_count
= total_count
;
6273 total_freq
= exit
->src
->frequency
;
6274 exit_freq
= EDGE_FREQUENCY (exit
);
6275 /* Fix up corner cases, to avoid division by zero or creation of negative
6277 if (total_freq
== 0)
6279 if (exit_freq
> total_freq
)
6280 exit_freq
= total_freq
;
6283 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6284 split_edge_bb_loc (exit
), true);
6287 scale_bbs_frequencies_gcov_type (region
, n_region
,
6288 total_count
- exit_count
,
6290 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
6295 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6297 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6300 /* Create the switch block, and put the exit condition to it. */
6301 entry_bb
= entry
->dest
;
6302 nentry_bb
= get_bb_copy (entry_bb
);
6303 if (!last_stmt (entry
->src
)
6304 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6305 switch_bb
= entry
->src
;
6307 switch_bb
= split_edge (entry
);
6308 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6310 gsi
= gsi_last_bb (switch_bb
);
6311 cond_stmt
= last_stmt (exit
->src
);
6312 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6313 cond_stmt
= gimple_copy (cond_stmt
);
6315 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6317 sorig
= single_succ_edge (switch_bb
);
6318 sorig
->flags
= exits
[1]->flags
;
6319 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6321 /* Register the new edge from SWITCH_BB in loop exit lists. */
6322 rescan_loop_exit (snew
, true, false);
6324 /* Add the PHI node arguments. */
6325 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6327 /* Get rid of now superfluous conditions and associated edges (and phi node
6329 exit_bb
= exit
->dest
;
6331 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6332 PENDING_STMT (e
) = NULL
;
6334 /* The latch of ORIG_LOOP was copied, and so was the backedge
6335 to the original header. We redirect this backedge to EXIT_BB. */
6336 for (i
= 0; i
< n_region
; i
++)
6337 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6339 gcc_assert (single_succ_edge (region_copy
[i
]));
6340 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6341 PENDING_STMT (e
) = NULL
;
6342 for (psi
= gsi_start_phis (exit_bb
);
6347 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6348 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6351 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6352 PENDING_STMT (e
) = NULL
;
6354 /* Anything that is outside of the region, but was dominated by something
6355 inside needs to update dominance info. */
6356 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6358 /* Update the SSA web. */
6359 update_ssa (TODO_update_ssa
);
6361 if (free_region_copy
)
6364 free_original_copy_tables ();
6368 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6369 adding blocks when the dominator traversal reaches EXIT. This
6370 function silently assumes that ENTRY strictly dominates EXIT. */
6373 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6374 vec
<basic_block
> *bbs_p
)
6378 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6380 son
= next_dom_son (CDI_DOMINATORS
, son
))
6382 bbs_p
->safe_push (son
);
6384 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6388 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6389 The duplicates are recorded in VARS_MAP. */
6392 replace_by_duplicate_decl (tree
*tp
, hash_map
<tree
, tree
> *vars_map
,
6395 tree t
= *tp
, new_t
;
6396 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6398 if (DECL_CONTEXT (t
) == to_context
)
6402 tree
&loc
= vars_map
->get_or_insert (t
, &existed
);
6408 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6409 add_local_decl (f
, new_t
);
6413 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6414 new_t
= copy_node (t
);
6416 DECL_CONTEXT (new_t
) = to_context
;
6427 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6428 VARS_MAP maps old ssa names and var_decls to the new ones. */
6431 replace_ssa_name (tree name
, hash_map
<tree
, tree
> *vars_map
,
6436 gcc_assert (!virtual_operand_p (name
));
6438 tree
*loc
= vars_map
->get (name
);
6442 tree decl
= SSA_NAME_VAR (name
);
6445 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6446 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6447 decl
, SSA_NAME_DEF_STMT (name
));
6448 if (SSA_NAME_IS_DEFAULT_DEF (name
))
6449 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context
),
6453 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6454 name
, SSA_NAME_DEF_STMT (name
));
6456 vars_map
->put (name
, new_name
);
6470 hash_map
<tree
, tree
> *vars_map
;
6471 htab_t new_label_map
;
6472 hash_map
<void *, void *> *eh_map
;
6476 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6477 contained in *TP if it has been ORIG_BLOCK previously and change the
6478 DECL_CONTEXT of every local variable referenced in *TP. */
6481 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6483 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6484 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6489 tree block
= TREE_BLOCK (t
);
6490 if (block
== p
->orig_block
6491 || (p
->orig_block
== NULL_TREE
6492 && block
!= NULL_TREE
))
6493 TREE_SET_BLOCK (t
, p
->new_block
);
6494 #ifdef ENABLE_CHECKING
6495 else if (block
!= NULL_TREE
)
6497 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6498 block
= BLOCK_SUPERCONTEXT (block
);
6499 gcc_assert (block
== p
->orig_block
);
6503 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6505 if (TREE_CODE (t
) == SSA_NAME
)
6506 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6507 else if (TREE_CODE (t
) == LABEL_DECL
)
6509 if (p
->new_label_map
)
6511 struct tree_map in
, *out
;
6513 out
= (struct tree_map
*)
6514 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6519 DECL_CONTEXT (t
) = p
->to_context
;
6521 else if (p
->remap_decls_p
)
6523 /* Replace T with its duplicate. T should no longer appear in the
6524 parent function, so this looks wasteful; however, it may appear
6525 in referenced_vars, and more importantly, as virtual operands of
6526 statements, and in alias lists of other variables. It would be
6527 quite difficult to expunge it from all those places. ??? It might
6528 suffice to do this for addressable variables. */
6529 if ((TREE_CODE (t
) == VAR_DECL
6530 && !is_global_var (t
))
6531 || TREE_CODE (t
) == CONST_DECL
)
6532 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6536 else if (TYPE_P (t
))
6542 /* Helper for move_stmt_r. Given an EH region number for the source
6543 function, map that to the duplicate EH regio number in the dest. */
6546 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6548 eh_region old_r
, new_r
;
6550 old_r
= get_eh_region_from_number (old_nr
);
6551 new_r
= static_cast<eh_region
> (*p
->eh_map
->get (old_r
));
6553 return new_r
->index
;
6556 /* Similar, but operate on INTEGER_CSTs. */
6559 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6563 old_nr
= tree_to_shwi (old_t_nr
);
6564 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6566 return build_int_cst (integer_type_node
, new_nr
);
6569 /* Like move_stmt_op, but for gimple statements.
6571 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6572 contained in the current statement in *GSI_P and change the
6573 DECL_CONTEXT of every local variable referenced in the current
6577 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6578 struct walk_stmt_info
*wi
)
6580 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6581 gimple stmt
= gsi_stmt (*gsi_p
);
6582 tree block
= gimple_block (stmt
);
6584 if (block
== p
->orig_block
6585 || (p
->orig_block
== NULL_TREE
6586 && block
!= NULL_TREE
))
6587 gimple_set_block (stmt
, p
->new_block
);
6589 switch (gimple_code (stmt
))
6592 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6594 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6595 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6596 switch (DECL_FUNCTION_CODE (fndecl
))
6598 case BUILT_IN_EH_COPY_VALUES
:
6599 r
= gimple_call_arg (stmt
, 1);
6600 r
= move_stmt_eh_region_tree_nr (r
, p
);
6601 gimple_call_set_arg (stmt
, 1, r
);
6604 case BUILT_IN_EH_POINTER
:
6605 case BUILT_IN_EH_FILTER
:
6606 r
= gimple_call_arg (stmt
, 0);
6607 r
= move_stmt_eh_region_tree_nr (r
, p
);
6608 gimple_call_set_arg (stmt
, 0, r
);
6619 gresx
*resx_stmt
= as_a
<gresx
*> (stmt
);
6620 int r
= gimple_resx_region (resx_stmt
);
6621 r
= move_stmt_eh_region_nr (r
, p
);
6622 gimple_resx_set_region (resx_stmt
, r
);
6626 case GIMPLE_EH_DISPATCH
:
6628 geh_dispatch
*eh_dispatch_stmt
= as_a
<geh_dispatch
*> (stmt
);
6629 int r
= gimple_eh_dispatch_region (eh_dispatch_stmt
);
6630 r
= move_stmt_eh_region_nr (r
, p
);
6631 gimple_eh_dispatch_set_region (eh_dispatch_stmt
, r
);
6635 case GIMPLE_OMP_RETURN
:
6636 case GIMPLE_OMP_CONTINUE
:
6639 if (is_gimple_omp (stmt
))
6641 /* Do not remap variables inside OMP directives. Variables
6642 referenced in clauses and directive header belong to the
6643 parent function and should not be moved into the child
6645 bool save_remap_decls_p
= p
->remap_decls_p
;
6646 p
->remap_decls_p
= false;
6647 *handled_ops_p
= true;
6649 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6652 p
->remap_decls_p
= save_remap_decls_p
;
6660 /* Move basic block BB from function CFUN to function DEST_FN. The
6661 block is moved out of the original linked list and placed after
6662 block AFTER in the new list. Also, the block is removed from the
6663 original array of blocks and placed in DEST_FN's array of blocks.
6664 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6665 updated to reflect the moved edges.
6667 The local variables are remapped to new instances, VARS_MAP is used
6668 to record the mapping. */
6671 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6672 basic_block after
, bool update_edge_count_p
,
6673 struct move_stmt_d
*d
)
6675 struct control_flow_graph
*cfg
;
6678 gimple_stmt_iterator si
;
6679 unsigned old_len
, new_len
;
6681 /* Remove BB from dominance structures. */
6682 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6684 /* Move BB from its current loop to the copy in the new function. */
6687 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
6689 bb
->loop_father
= new_loop
;
6692 /* Link BB to the new linked list. */
6693 move_block_after (bb
, after
);
6695 /* Update the edge count in the corresponding flowgraphs. */
6696 if (update_edge_count_p
)
6697 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6699 cfun
->cfg
->x_n_edges
--;
6700 dest_cfun
->cfg
->x_n_edges
++;
6703 /* Remove BB from the original basic block array. */
6704 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
6705 cfun
->cfg
->x_n_basic_blocks
--;
6707 /* Grow DEST_CFUN's basic block array if needed. */
6708 cfg
= dest_cfun
->cfg
;
6709 cfg
->x_n_basic_blocks
++;
6710 if (bb
->index
>= cfg
->x_last_basic_block
)
6711 cfg
->x_last_basic_block
= bb
->index
+ 1;
6713 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
6714 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
6716 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
6717 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
6720 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
6722 /* Remap the variables in phi nodes. */
6723 for (gphi_iterator psi
= gsi_start_phis (bb
);
6726 gphi
*phi
= psi
.phi ();
6728 tree op
= PHI_RESULT (phi
);
6732 if (virtual_operand_p (op
))
6734 /* Remove the phi nodes for virtual operands (alias analysis will be
6735 run for the new function, anyway). */
6736 remove_phi_node (&psi
, true);
6740 SET_PHI_RESULT (phi
,
6741 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6742 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
6744 op
= USE_FROM_PTR (use
);
6745 if (TREE_CODE (op
) == SSA_NAME
)
6746 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6749 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
6751 location_t locus
= gimple_phi_arg_location (phi
, i
);
6752 tree block
= LOCATION_BLOCK (locus
);
6754 if (locus
== UNKNOWN_LOCATION
)
6756 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
6758 if (d
->new_block
== NULL_TREE
)
6759 locus
= LOCATION_LOCUS (locus
);
6761 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, d
->new_block
);
6762 gimple_phi_arg_set_location (phi
, i
, locus
);
6769 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6771 gimple stmt
= gsi_stmt (si
);
6772 struct walk_stmt_info wi
;
6774 memset (&wi
, 0, sizeof (wi
));
6776 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
6778 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
6780 tree label
= gimple_label_label (label_stmt
);
6781 int uid
= LABEL_DECL_UID (label
);
6783 gcc_assert (uid
> -1);
6785 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
6786 if (old_len
<= (unsigned) uid
)
6788 new_len
= 3 * uid
/ 2 + 1;
6789 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
6792 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
6793 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
6795 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
6797 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
6798 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
6801 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
6802 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
6804 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
6805 gimple_remove_stmt_histograms (cfun
, stmt
);
6807 /* We cannot leave any operands allocated from the operand caches of
6808 the current function. */
6809 free_stmt_operands (cfun
, stmt
);
6810 push_cfun (dest_cfun
);
6815 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6816 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
6818 tree block
= LOCATION_BLOCK (e
->goto_locus
);
6819 if (d
->orig_block
== NULL_TREE
6820 || block
== d
->orig_block
)
6821 e
->goto_locus
= d
->new_block
?
6822 COMBINE_LOCATION_DATA (line_table
, e
->goto_locus
, d
->new_block
) :
6823 LOCATION_LOCUS (e
->goto_locus
);
6827 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6828 the outermost EH region. Use REGION as the incoming base EH region. */
6831 find_outermost_region_in_block (struct function
*src_cfun
,
6832 basic_block bb
, eh_region region
)
6834 gimple_stmt_iterator si
;
6836 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6838 gimple stmt
= gsi_stmt (si
);
6839 eh_region stmt_region
;
6842 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
6843 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
6847 region
= stmt_region
;
6848 else if (stmt_region
!= region
)
6850 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
6851 gcc_assert (region
!= NULL
);
6860 new_label_mapper (tree decl
, void *data
)
6862 htab_t hash
= (htab_t
) data
;
6866 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
6868 m
= XNEW (struct tree_map
);
6869 m
->hash
= DECL_UID (decl
);
6870 m
->base
.from
= decl
;
6871 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
6872 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
6873 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
6874 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
6876 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
6877 gcc_assert (*slot
== NULL
);
6884 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6888 replace_block_vars_by_duplicates (tree block
, hash_map
<tree
, tree
> *vars_map
,
6893 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
6896 if (TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != CONST_DECL
)
6898 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
6901 if (TREE_CODE (*tp
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (*tp
))
6903 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (*tp
));
6904 DECL_HAS_VALUE_EXPR_P (t
) = 1;
6906 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
6911 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
6912 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
6915 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6919 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
6922 /* Discard it from the old loop array. */
6923 (*get_loops (fn1
))[loop
->num
] = NULL
;
6925 /* Place it in the new loop array, assigning it a new number. */
6926 loop
->num
= number_of_loops (fn2
);
6927 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
6929 /* Recurse to children. */
6930 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
6931 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
6934 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6935 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6938 verify_sese (basic_block entry
, basic_block exit
, vec
<basic_block
> *bbs_p
)
6943 bitmap bbs
= BITMAP_ALLOC (NULL
);
6946 gcc_assert (entry
!= NULL
);
6947 gcc_assert (entry
!= exit
);
6948 gcc_assert (bbs_p
!= NULL
);
6950 gcc_assert (bbs_p
->length () > 0);
6952 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
6953 bitmap_set_bit (bbs
, bb
->index
);
6955 gcc_assert (bitmap_bit_p (bbs
, entry
->index
));
6956 gcc_assert (exit
== NULL
|| bitmap_bit_p (bbs
, exit
->index
));
6958 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
6962 gcc_assert (single_pred_p (entry
));
6963 gcc_assert (!bitmap_bit_p (bbs
, single_pred (entry
)->index
));
6966 for (ei
= ei_start (bb
->preds
); !ei_end_p (ei
); ei_next (&ei
))
6969 gcc_assert (bitmap_bit_p (bbs
, e
->src
->index
));
6974 gcc_assert (single_succ_p (exit
));
6975 gcc_assert (!bitmap_bit_p (bbs
, single_succ (exit
)->index
));
6978 for (ei
= ei_start (bb
->succs
); !ei_end_p (ei
); ei_next (&ei
))
6981 gcc_assert (bitmap_bit_p (bbs
, e
->dest
->index
));
6989 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6990 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6991 single basic block in the original CFG and the new basic block is
6992 returned. DEST_CFUN must not have a CFG yet.
6994 Note that the region need not be a pure SESE region. Blocks inside
6995 the region may contain calls to abort/exit. The only restriction
6996 is that ENTRY_BB should be the only entry point and it must
6999 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7000 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7001 to the new function.
7003 All local variables referenced in the region are assumed to be in
7004 the corresponding BLOCK_VARS and unexpanded variable lists
7005 associated with DEST_CFUN. */
7008 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
7009 basic_block exit_bb
, tree orig_block
)
7011 vec
<basic_block
> bbs
, dom_bbs
;
7012 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
7013 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
7014 struct function
*saved_cfun
= cfun
;
7015 int *entry_flag
, *exit_flag
;
7016 unsigned *entry_prob
, *exit_prob
;
7017 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
7020 htab_t new_label_map
;
7021 hash_map
<void *, void *> *eh_map
;
7022 struct loop
*loop
= entry_bb
->loop_father
;
7023 struct loop
*loop0
= get_loop (saved_cfun
, 0);
7024 struct move_stmt_d d
;
7026 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7028 gcc_assert (entry_bb
!= exit_bb
7030 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
7032 /* Collect all the blocks in the region. Manually add ENTRY_BB
7033 because it won't be added by dfs_enumerate_from. */
7035 bbs
.safe_push (entry_bb
);
7036 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
7037 #ifdef ENABLE_CHECKING
7038 verify_sese (entry_bb
, exit_bb
, &bbs
);
7041 /* The blocks that used to be dominated by something in BBS will now be
7042 dominated by the new block. */
7043 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
7047 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7048 the predecessor edges to ENTRY_BB and the successor edges to
7049 EXIT_BB so that we can re-attach them to the new basic block that
7050 will replace the region. */
7051 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
7052 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
7053 entry_flag
= XNEWVEC (int, num_entry_edges
);
7054 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
7056 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
7058 entry_prob
[i
] = e
->probability
;
7059 entry_flag
[i
] = e
->flags
;
7060 entry_pred
[i
++] = e
->src
;
7066 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
7067 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
7068 exit_flag
= XNEWVEC (int, num_exit_edges
);
7069 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
7071 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
7073 exit_prob
[i
] = e
->probability
;
7074 exit_flag
[i
] = e
->flags
;
7075 exit_succ
[i
++] = e
->dest
;
7087 /* Switch context to the child function to initialize DEST_FN's CFG. */
7088 gcc_assert (dest_cfun
->cfg
== NULL
);
7089 push_cfun (dest_cfun
);
7091 init_empty_tree_cfg ();
7093 /* Initialize EH information for the new function. */
7095 new_label_map
= NULL
;
7098 eh_region region
= NULL
;
7100 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7101 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
7103 init_eh_for_function ();
7106 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
7107 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
7108 new_label_mapper
, new_label_map
);
7112 /* Initialize an empty loop tree. */
7113 struct loops
*loops
= ggc_cleared_alloc
<struct loops
> ();
7114 init_loops_structure (dest_cfun
, loops
, 1);
7115 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
7116 set_loops_for_fn (dest_cfun
, loops
);
7118 /* Move the outlined loop tree part. */
7119 num_nodes
= bbs
.length ();
7120 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7122 if (bb
->loop_father
->header
== bb
)
7124 struct loop
*this_loop
= bb
->loop_father
;
7125 struct loop
*outer
= loop_outer (this_loop
);
7127 /* If the SESE region contains some bbs ending with
7128 a noreturn call, those are considered to belong
7129 to the outermost loop in saved_cfun, rather than
7130 the entry_bb's loop_father. */
7134 num_nodes
-= this_loop
->num_nodes
;
7135 flow_loop_tree_node_remove (bb
->loop_father
);
7136 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
7137 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
7140 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
7143 /* Remove loop exits from the outlined region. */
7144 if (loops_for_fn (saved_cfun
)->exits
)
7145 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7147 struct loops
*l
= loops_for_fn (saved_cfun
);
7149 = l
->exits
->find_slot_with_hash (e
, htab_hash_pointer (e
),
7152 l
->exits
->clear_slot (slot
);
7157 /* Adjust the number of blocks in the tree root of the outlined part. */
7158 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
7160 /* Setup a mapping to be used by move_block_to_fn. */
7161 loop
->aux
= current_loops
->tree_root
;
7162 loop0
->aux
= current_loops
->tree_root
;
7166 /* Move blocks from BBS into DEST_CFUN. */
7167 gcc_assert (bbs
.length () >= 2);
7168 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
7169 hash_map
<tree
, tree
> vars_map
;
7171 memset (&d
, 0, sizeof (d
));
7172 d
.orig_block
= orig_block
;
7173 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
7174 d
.from_context
= cfun
->decl
;
7175 d
.to_context
= dest_cfun
->decl
;
7176 d
.vars_map
= &vars_map
;
7177 d
.new_label_map
= new_label_map
;
7179 d
.remap_decls_p
= true;
7181 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7183 /* No need to update edge counts on the last block. It has
7184 already been updated earlier when we detached the region from
7185 the original CFG. */
7186 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
7192 /* Loop sizes are no longer correct, fix them up. */
7193 loop
->num_nodes
-= num_nodes
;
7194 for (struct loop
*outer
= loop_outer (loop
);
7195 outer
; outer
= loop_outer (outer
))
7196 outer
->num_nodes
-= num_nodes
;
7197 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
7199 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vectorize_loops
)
7202 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
7207 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
7209 dest_cfun
->has_simduid_loops
= true;
7211 if (aloop
->force_vectorize
)
7212 dest_cfun
->has_force_vectorize_loops
= true;
7216 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7220 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7222 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7223 = BLOCK_SUBBLOCKS (orig_block
);
7224 for (block
= BLOCK_SUBBLOCKS (orig_block
);
7225 block
; block
= BLOCK_CHAIN (block
))
7226 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
7227 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
7230 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
7231 &vars_map
, dest_cfun
->decl
);
7234 htab_delete (new_label_map
);
7238 /* Rewire the entry and exit blocks. The successor to the entry
7239 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7240 the child function. Similarly, the predecessor of DEST_FN's
7241 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7242 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7243 various CFG manipulation function get to the right CFG.
7245 FIXME, this is silly. The CFG ought to become a parameter to
7247 push_cfun (dest_cfun
);
7248 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
7250 make_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
7253 /* Back in the original function, the SESE region has disappeared,
7254 create a new basic block in its place. */
7255 bb
= create_empty_bb (entry_pred
[0]);
7257 add_bb_to_loop (bb
, loop
);
7258 for (i
= 0; i
< num_entry_edges
; i
++)
7260 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
7261 e
->probability
= entry_prob
[i
];
7264 for (i
= 0; i
< num_exit_edges
; i
++)
7266 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
7267 e
->probability
= exit_prob
[i
];
7270 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
7271 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
7272 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
7290 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7294 dump_function_to_file (tree fndecl
, FILE *file
, int flags
)
7296 tree arg
, var
, old_current_fndecl
= current_function_decl
;
7297 struct function
*dsf
;
7298 bool ignore_topmost_bind
= false, any_var
= false;
7301 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
7302 && decl_is_tm_clone (fndecl
));
7303 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
7305 current_function_decl
= fndecl
;
7306 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
7308 arg
= DECL_ARGUMENTS (fndecl
);
7311 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
7312 fprintf (file
, " ");
7313 print_generic_expr (file
, arg
, dump_flags
);
7314 if (flags
& TDF_VERBOSE
)
7315 print_node (file
, "", arg
, 4);
7316 if (DECL_CHAIN (arg
))
7317 fprintf (file
, ", ");
7318 arg
= DECL_CHAIN (arg
);
7320 fprintf (file
, ")\n");
7322 if (flags
& TDF_VERBOSE
)
7323 print_node (file
, "", fndecl
, 2);
7325 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7326 if (dsf
&& (flags
& TDF_EH
))
7327 dump_eh_tree (file
, dsf
);
7329 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7331 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7332 current_function_decl
= old_current_fndecl
;
7336 /* When GIMPLE is lowered, the variables are no longer available in
7337 BIND_EXPRs, so display them separately. */
7338 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7341 ignore_topmost_bind
= true;
7343 fprintf (file
, "{\n");
7344 if (!vec_safe_is_empty (fun
->local_decls
))
7345 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7347 print_generic_decl (file
, var
, flags
);
7348 if (flags
& TDF_VERBOSE
)
7349 print_node (file
, "", var
, 4);
7350 fprintf (file
, "\n");
7354 if (gimple_in_ssa_p (cfun
))
7355 for (ix
= 1; ix
< num_ssa_names
; ++ix
)
7357 tree name
= ssa_name (ix
);
7358 if (name
&& !SSA_NAME_VAR (name
))
7360 fprintf (file
, " ");
7361 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7362 fprintf (file
, " ");
7363 print_generic_expr (file
, name
, flags
);
7364 fprintf (file
, ";\n");
7371 if (fun
&& fun
->decl
== fndecl
7373 && basic_block_info_for_fn (fun
))
7375 /* If the CFG has been built, emit a CFG-based dump. */
7376 if (!ignore_topmost_bind
)
7377 fprintf (file
, "{\n");
7379 if (any_var
&& n_basic_blocks_for_fn (fun
))
7380 fprintf (file
, "\n");
7382 FOR_EACH_BB_FN (bb
, fun
)
7383 dump_bb (file
, bb
, 2, flags
| TDF_COMMENT
);
7385 fprintf (file
, "}\n");
7387 else if (DECL_SAVED_TREE (fndecl
) == NULL
)
7389 /* The function is now in GIMPLE form but the CFG has not been
7390 built yet. Emit the single sequence of GIMPLE statements
7391 that make up its body. */
7392 gimple_seq body
= gimple_body (fndecl
);
7394 if (gimple_seq_first_stmt (body
)
7395 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7396 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7397 print_gimple_seq (file
, body
, 0, flags
);
7400 if (!ignore_topmost_bind
)
7401 fprintf (file
, "{\n");
7404 fprintf (file
, "\n");
7406 print_gimple_seq (file
, body
, 2, flags
);
7407 fprintf (file
, "}\n");
7414 /* Make a tree based dump. */
7415 chain
= DECL_SAVED_TREE (fndecl
);
7416 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7418 if (ignore_topmost_bind
)
7420 chain
= BIND_EXPR_BODY (chain
);
7428 if (!ignore_topmost_bind
)
7430 fprintf (file
, "{\n");
7431 /* No topmost bind, pretend it's ignored for later. */
7432 ignore_topmost_bind
= true;
7438 fprintf (file
, "\n");
7440 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7441 if (ignore_topmost_bind
)
7442 fprintf (file
, "}\n");
7445 if (flags
& TDF_ENUMERATE_LOCALS
)
7446 dump_enumerated_decls (file
, flags
);
7447 fprintf (file
, "\n\n");
7449 current_function_decl
= old_current_fndecl
;
7452 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7455 debug_function (tree fn
, int flags
)
7457 dump_function_to_file (fn
, stderr
, flags
);
7461 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7464 print_pred_bbs (FILE *file
, basic_block bb
)
7469 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7470 fprintf (file
, "bb_%d ", e
->src
->index
);
7474 /* Print on FILE the indexes for the successors of basic_block BB. */
7477 print_succ_bbs (FILE *file
, basic_block bb
)
7482 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7483 fprintf (file
, "bb_%d ", e
->dest
->index
);
7486 /* Print to FILE the basic block BB following the VERBOSITY level. */
7489 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7491 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7492 memset ((void *) s_indent
, ' ', (size_t) indent
);
7493 s_indent
[indent
] = '\0';
7495 /* Print basic_block's header. */
7498 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7499 print_pred_bbs (file
, bb
);
7500 fprintf (file
, "}, succs = {");
7501 print_succ_bbs (file
, bb
);
7502 fprintf (file
, "})\n");
7505 /* Print basic_block's body. */
7508 fprintf (file
, "%s {\n", s_indent
);
7509 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
7510 fprintf (file
, "%s }\n", s_indent
);
7514 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
7516 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7517 VERBOSITY level this outputs the contents of the loop, or just its
7521 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
7529 s_indent
= (char *) alloca ((size_t) indent
+ 1);
7530 memset ((void *) s_indent
, ' ', (size_t) indent
);
7531 s_indent
[indent
] = '\0';
7533 /* Print loop's header. */
7534 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
7536 fprintf (file
, "header = %d", loop
->header
->index
);
7539 fprintf (file
, "deleted)\n");
7543 fprintf (file
, ", latch = %d", loop
->latch
->index
);
7545 fprintf (file
, ", multiple latches");
7546 fprintf (file
, ", niter = ");
7547 print_generic_expr (file
, loop
->nb_iterations
, 0);
7549 if (loop
->any_upper_bound
)
7551 fprintf (file
, ", upper_bound = ");
7552 print_decu (loop
->nb_iterations_upper_bound
, file
);
7555 if (loop
->any_estimate
)
7557 fprintf (file
, ", estimate = ");
7558 print_decu (loop
->nb_iterations_estimate
, file
);
7560 fprintf (file
, ")\n");
7562 /* Print loop's body. */
7565 fprintf (file
, "%s{\n", s_indent
);
7566 FOR_EACH_BB_FN (bb
, cfun
)
7567 if (bb
->loop_father
== loop
)
7568 print_loops_bb (file
, bb
, indent
, verbosity
);
7570 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
7571 fprintf (file
, "%s}\n", s_indent
);
7575 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7576 spaces. Following VERBOSITY level this outputs the contents of the
7577 loop, or just its structure. */
7580 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
7586 print_loop (file
, loop
, indent
, verbosity
);
7587 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
7590 /* Follow a CFG edge from the entry point of the program, and on entry
7591 of a loop, pretty print the loop structure on FILE. */
7594 print_loops (FILE *file
, int verbosity
)
7598 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
7599 if (bb
&& bb
->loop_father
)
7600 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
7606 debug (struct loop
&ref
)
7608 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
7612 debug (struct loop
*ptr
)
7617 fprintf (stderr
, "<nil>\n");
7620 /* Dump a loop verbosely. */
7623 debug_verbose (struct loop
&ref
)
7625 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
7629 debug_verbose (struct loop
*ptr
)
7634 fprintf (stderr
, "<nil>\n");
7638 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7641 debug_loops (int verbosity
)
7643 print_loops (stderr
, verbosity
);
7646 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7649 debug_loop (struct loop
*loop
, int verbosity
)
7651 print_loop (stderr
, loop
, 0, verbosity
);
7654 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7658 debug_loop_num (unsigned num
, int verbosity
)
7660 debug_loop (get_loop (cfun
, num
), verbosity
);
7663 /* Return true if BB ends with a call, possibly followed by some
7664 instructions that must stay with the call. Return false,
7668 gimple_block_ends_with_call_p (basic_block bb
)
7670 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7671 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
7675 /* Return true if BB ends with a conditional branch. Return false,
7679 gimple_block_ends_with_condjump_p (const_basic_block bb
)
7681 gimple stmt
= last_stmt (CONST_CAST_BB (bb
));
7682 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
7686 /* Return true if we need to add fake edge to exit at statement T.
7687 Helper function for gimple_flow_call_edges_add. */
7690 need_fake_edge_p (gimple t
)
7692 tree fndecl
= NULL_TREE
;
7695 /* NORETURN and LONGJMP calls already have an edge to exit.
7696 CONST and PURE calls do not need one.
7697 We don't currently check for CONST and PURE here, although
7698 it would be a good idea, because those attributes are
7699 figured out from the RTL in mark_constant_function, and
7700 the counter incrementation code from -fprofile-arcs
7701 leads to different results from -fbranch-probabilities. */
7702 if (is_gimple_call (t
))
7704 fndecl
= gimple_call_fndecl (t
);
7705 call_flags
= gimple_call_flags (t
);
7708 if (is_gimple_call (t
)
7710 && DECL_BUILT_IN (fndecl
)
7711 && (call_flags
& ECF_NOTHROW
)
7712 && !(call_flags
& ECF_RETURNS_TWICE
)
7713 /* fork() doesn't really return twice, but the effect of
7714 wrapping it in __gcov_fork() which calls __gcov_flush()
7715 and clears the counters before forking has the same
7716 effect as returning twice. Force a fake edge. */
7717 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7718 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
7721 if (is_gimple_call (t
))
7727 if (!(call_flags
& ECF_NORETURN
))
7731 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7732 if ((e
->flags
& EDGE_FAKE
) == 0)
7736 if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (t
))
7737 if (gimple_asm_volatile_p (asm_stmt
) || gimple_asm_input_p (asm_stmt
))
7744 /* Add fake edges to the function exit for any non constant and non
7745 noreturn calls (or noreturn calls with EH/abnormal edges),
7746 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7747 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7750 The goal is to expose cases in which entering a basic block does
7751 not imply that all subsequent instructions must be executed. */
7754 gimple_flow_call_edges_add (sbitmap blocks
)
7757 int blocks_split
= 0;
7758 int last_bb
= last_basic_block_for_fn (cfun
);
7759 bool check_last_block
= false;
7761 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
7765 check_last_block
= true;
7767 check_last_block
= bitmap_bit_p (blocks
,
7768 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
7770 /* In the last basic block, before epilogue generation, there will be
7771 a fallthru edge to EXIT. Special care is required if the last insn
7772 of the last basic block is a call because make_edge folds duplicate
7773 edges, which would result in the fallthru edge also being marked
7774 fake, which would result in the fallthru edge being removed by
7775 remove_fake_edges, which would result in an invalid CFG.
7777 Moreover, we can't elide the outgoing fake edge, since the block
7778 profiler needs to take this into account in order to solve the minimal
7779 spanning tree in the case that the call doesn't return.
7781 Handle this by adding a dummy instruction in a new last basic block. */
7782 if (check_last_block
)
7784 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
7785 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7788 if (!gsi_end_p (gsi
))
7791 if (t
&& need_fake_edge_p (t
))
7795 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7798 gsi_insert_on_edge (e
, gimple_build_nop ());
7799 gsi_commit_edge_inserts ();
7804 /* Now add fake edges to the function exit for any non constant
7805 calls since there is no way that we can determine if they will
7807 for (i
= 0; i
< last_bb
; i
++)
7809 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7810 gimple_stmt_iterator gsi
;
7811 gimple stmt
, last_stmt
;
7816 if (blocks
&& !bitmap_bit_p (blocks
, i
))
7819 gsi
= gsi_last_nondebug_bb (bb
);
7820 if (!gsi_end_p (gsi
))
7822 last_stmt
= gsi_stmt (gsi
);
7825 stmt
= gsi_stmt (gsi
);
7826 if (need_fake_edge_p (stmt
))
7830 /* The handling above of the final block before the
7831 epilogue should be enough to verify that there is
7832 no edge to the exit block in CFG already.
7833 Calling make_edge in such case would cause us to
7834 mark that edge as fake and remove it later. */
7835 #ifdef ENABLE_CHECKING
7836 if (stmt
== last_stmt
)
7838 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7839 gcc_assert (e
== NULL
);
7843 /* Note that the following may create a new basic block
7844 and renumber the existing basic blocks. */
7845 if (stmt
!= last_stmt
)
7847 e
= split_block (bb
, stmt
);
7851 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
7855 while (!gsi_end_p (gsi
));
7860 verify_flow_info ();
7862 return blocks_split
;
7865 /* Removes edge E and all the blocks dominated by it, and updates dominance
7866 information. The IL in E->src needs to be updated separately.
7867 If dominance info is not available, only the edge E is removed.*/
7870 remove_edge_and_dominated_blocks (edge e
)
7872 vec
<basic_block
> bbs_to_remove
= vNULL
;
7873 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
7877 bool none_removed
= false;
7879 basic_block bb
, dbb
;
7882 /* If we are removing a path inside a non-root loop that may change
7883 loop ownership of blocks or remove loops. Mark loops for fixup. */
7885 && loop_outer (e
->src
->loop_father
) != NULL
7886 && e
->src
->loop_father
== e
->dest
->loop_father
)
7887 loops_state_set (LOOPS_NEED_FIXUP
);
7889 if (!dom_info_available_p (CDI_DOMINATORS
))
7895 /* No updating is needed for edges to exit. */
7896 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7898 if (cfgcleanup_altered_bbs
)
7899 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7904 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7905 that is not dominated by E->dest, then this set is empty. Otherwise,
7906 all the basic blocks dominated by E->dest are removed.
7908 Also, to DF_IDOM we store the immediate dominators of the blocks in
7909 the dominance frontier of E (i.e., of the successors of the
7910 removed blocks, if there are any, and of E->dest otherwise). */
7911 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
7916 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
7918 none_removed
= true;
7923 df
= BITMAP_ALLOC (NULL
);
7924 df_idom
= BITMAP_ALLOC (NULL
);
7927 bitmap_set_bit (df_idom
,
7928 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
7931 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
7932 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7934 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
7936 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
7937 bitmap_set_bit (df
, f
->dest
->index
);
7940 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7941 bitmap_clear_bit (df
, bb
->index
);
7943 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
7945 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7946 bitmap_set_bit (df_idom
,
7947 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
7951 if (cfgcleanup_altered_bbs
)
7953 /* Record the set of the altered basic blocks. */
7954 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7955 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
7958 /* Remove E and the cancelled blocks. */
7963 /* Walk backwards so as to get a chance to substitute all
7964 released DEFs into debug stmts. See
7965 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7967 for (i
= bbs_to_remove
.length (); i
-- > 0; )
7968 delete_basic_block (bbs_to_remove
[i
]);
7971 /* Update the dominance information. The immediate dominator may change only
7972 for blocks whose immediate dominator belongs to DF_IDOM:
7974 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7975 removal. Let Z the arbitrary block such that idom(Z) = Y and
7976 Z dominates X after the removal. Before removal, there exists a path P
7977 from Y to X that avoids Z. Let F be the last edge on P that is
7978 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7979 dominates W, and because of P, Z does not dominate W), and W belongs to
7980 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7981 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
7983 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7984 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
7986 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
7987 bbs_to_fix_dom
.safe_push (dbb
);
7990 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
7993 BITMAP_FREE (df_idom
);
7994 bbs_to_remove
.release ();
7995 bbs_to_fix_dom
.release ();
7998 /* Purge dead EH edges from basic block BB. */
8001 gimple_purge_dead_eh_edges (basic_block bb
)
8003 bool changed
= false;
8006 gimple stmt
= last_stmt (bb
);
8008 if (stmt
&& stmt_can_throw_internal (stmt
))
8011 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8013 if (e
->flags
& EDGE_EH
)
8015 remove_edge_and_dominated_blocks (e
);
8025 /* Purge dead EH edges from basic block listed in BLOCKS. */
8028 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
8030 bool changed
= false;
8034 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8036 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8038 /* Earlier gimple_purge_dead_eh_edges could have removed
8039 this basic block already. */
8040 gcc_assert (bb
|| changed
);
8042 changed
|= gimple_purge_dead_eh_edges (bb
);
8048 /* Purge dead abnormal call edges from basic block BB. */
8051 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
8053 bool changed
= false;
8056 gimple stmt
= last_stmt (bb
);
8058 if (!cfun
->has_nonlocal_label
8059 && !cfun
->calls_setjmp
)
8062 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
8065 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8067 if (e
->flags
& EDGE_ABNORMAL
)
8069 if (e
->flags
& EDGE_FALLTHRU
)
8070 e
->flags
&= ~EDGE_ABNORMAL
;
8072 remove_edge_and_dominated_blocks (e
);
8082 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8085 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
8087 bool changed
= false;
8091 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8093 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8095 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8096 this basic block already. */
8097 gcc_assert (bb
|| changed
);
8099 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
8105 /* This function is called whenever a new edge is created or
8109 gimple_execute_on_growing_pred (edge e
)
8111 basic_block bb
= e
->dest
;
8113 if (!gimple_seq_empty_p (phi_nodes (bb
)))
8114 reserve_phi_args_for_new_edge (bb
);
8117 /* This function is called immediately before edge E is removed from
8118 the edge vector E->dest->preds. */
8121 gimple_execute_on_shrinking_pred (edge e
)
8123 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
8124 remove_phi_args (e
);
8127 /*---------------------------------------------------------------------------
8128 Helper functions for Loop versioning
8129 ---------------------------------------------------------------------------*/
8131 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8132 of 'first'. Both of them are dominated by 'new_head' basic block. When
8133 'new_head' was created by 'second's incoming edge it received phi arguments
8134 on the edge by split_edge(). Later, additional edge 'e' was created to
8135 connect 'new_head' and 'first'. Now this routine adds phi args on this
8136 additional edge 'e' that new_head to second edge received as part of edge
8140 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
8141 basic_block new_head
, edge e
)
8144 gphi_iterator psi1
, psi2
;
8146 edge e2
= find_edge (new_head
, second
);
8148 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8149 edge, we should always have an edge from NEW_HEAD to SECOND. */
8150 gcc_assert (e2
!= NULL
);
8152 /* Browse all 'second' basic block phi nodes and add phi args to
8153 edge 'e' for 'first' head. PHI args are always in correct order. */
8155 for (psi2
= gsi_start_phis (second
),
8156 psi1
= gsi_start_phis (first
);
8157 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
8158 gsi_next (&psi2
), gsi_next (&psi1
))
8162 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
8163 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
8168 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8169 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8170 the destination of the ELSE part. */
8173 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
8174 basic_block second_head ATTRIBUTE_UNUSED
,
8175 basic_block cond_bb
, void *cond_e
)
8177 gimple_stmt_iterator gsi
;
8178 gimple new_cond_expr
;
8179 tree cond_expr
= (tree
) cond_e
;
8182 /* Build new conditional expr */
8183 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
8184 NULL_TREE
, NULL_TREE
);
8186 /* Add new cond in cond_bb. */
8187 gsi
= gsi_last_bb (cond_bb
);
8188 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
8190 /* Adjust edges appropriately to connect new head with first head
8191 as well as second head. */
8192 e0
= single_succ_edge (cond_bb
);
8193 e0
->flags
&= ~EDGE_FALLTHRU
;
8194 e0
->flags
|= EDGE_FALSE_VALUE
;
8198 /* Do book-keeping of basic block BB for the profile consistency checker.
8199 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8200 then do post-pass accounting. Store the counting in RECORD. */
8202 gimple_account_profile_record (basic_block bb
, int after_pass
,
8203 struct profile_record
*record
)
8205 gimple_stmt_iterator i
;
8206 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
8208 record
->size
[after_pass
]
8209 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
8210 if (profile_status_for_fn (cfun
) == PROFILE_READ
)
8211 record
->time
[after_pass
]
8212 += estimate_num_insns (gsi_stmt (i
),
8213 &eni_time_weights
) * bb
->count
;
8214 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
8215 record
->time
[after_pass
]
8216 += estimate_num_insns (gsi_stmt (i
),
8217 &eni_time_weights
) * bb
->frequency
;
8221 struct cfg_hooks gimple_cfg_hooks
= {
8223 gimple_verify_flow_info
,
8224 gimple_dump_bb
, /* dump_bb */
8225 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
8226 create_bb
, /* create_basic_block */
8227 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
8228 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
8229 gimple_can_remove_branch_p
, /* can_remove_branch_p */
8230 remove_bb
, /* delete_basic_block */
8231 gimple_split_block
, /* split_block */
8232 gimple_move_block_after
, /* move_block_after */
8233 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
8234 gimple_merge_blocks
, /* merge_blocks */
8235 gimple_predict_edge
, /* predict_edge */
8236 gimple_predicted_by_p
, /* predicted_by_p */
8237 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
8238 gimple_duplicate_bb
, /* duplicate_block */
8239 gimple_split_edge
, /* split_edge */
8240 gimple_make_forwarder_block
, /* make_forward_block */
8241 NULL
, /* tidy_fallthru_edge */
8242 NULL
, /* force_nonfallthru */
8243 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
8244 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
8245 gimple_flow_call_edges_add
, /* flow_call_edges_add */
8246 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
8247 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
8248 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
8249 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
8250 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
8251 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
8252 flush_pending_stmts
, /* flush_pending_stmts */
8253 gimple_empty_block_p
, /* block_empty_p */
8254 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
8255 gimple_account_profile_record
,
8259 /* Split all critical edges. */
8262 split_critical_edges (void)
8268 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8269 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8270 mappings around the calls to split_edge. */
8271 start_recording_case_labels ();
8272 FOR_ALL_BB_FN (bb
, cfun
)
8274 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8276 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
8278 /* PRE inserts statements to edges and expects that
8279 since split_critical_edges was done beforehand, committing edge
8280 insertions will not split more edges. In addition to critical
8281 edges we must split edges that have multiple successors and
8282 end by control flow statements, such as RESX.
8283 Go ahead and split them too. This matches the logic in
8284 gimple_find_edge_insert_loc. */
8285 else if ((!single_pred_p (e
->dest
)
8286 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
8287 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8288 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
8289 && !(e
->flags
& EDGE_ABNORMAL
))
8291 gimple_stmt_iterator gsi
;
8293 gsi
= gsi_last_bb (e
->src
);
8294 if (!gsi_end_p (gsi
)
8295 && stmt_ends_bb_p (gsi_stmt (gsi
))
8296 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
8297 && !gimple_call_builtin_p (gsi_stmt (gsi
),
8303 end_recording_case_labels ();
8309 const pass_data pass_data_split_crit_edges
=
8311 GIMPLE_PASS
, /* type */
8312 "crited", /* name */
8313 OPTGROUP_NONE
, /* optinfo_flags */
8314 TV_TREE_SPLIT_EDGES
, /* tv_id */
8315 PROP_cfg
, /* properties_required */
8316 PROP_no_crit_edges
, /* properties_provided */
8317 0, /* properties_destroyed */
8318 0, /* todo_flags_start */
8319 0, /* todo_flags_finish */
8322 class pass_split_crit_edges
: public gimple_opt_pass
8325 pass_split_crit_edges (gcc::context
*ctxt
)
8326 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
8329 /* opt_pass methods: */
8330 virtual unsigned int execute (function
*) { return split_critical_edges (); }
8332 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8333 }; // class pass_split_crit_edges
8338 make_pass_split_crit_edges (gcc::context
*ctxt
)
8340 return new pass_split_crit_edges (ctxt
);
8344 /* Insert COND expression which is GIMPLE_COND after STMT
8345 in basic block BB with appropriate basic block split
8346 and creation of a new conditionally executed basic block.
8347 Return created basic block. */
8349 insert_cond_bb (basic_block bb
, gimple stmt
, gimple cond
)
8351 edge fall
= split_block (bb
, stmt
);
8352 gimple_stmt_iterator iter
= gsi_last_bb (bb
);
8355 /* Insert cond statement. */
8356 gcc_assert (gimple_code (cond
) == GIMPLE_COND
);
8357 if (gsi_end_p (iter
))
8358 gsi_insert_before (&iter
, cond
, GSI_CONTINUE_LINKING
);
8360 gsi_insert_after (&iter
, cond
, GSI_CONTINUE_LINKING
);
8362 /* Create conditionally executed block. */
8363 new_bb
= create_empty_bb (bb
);
8364 make_edge (bb
, new_bb
, EDGE_TRUE_VALUE
);
8365 make_single_succ_edge (new_bb
, fall
->dest
, EDGE_FALLTHRU
);
8367 /* Fix edge for split bb. */
8368 fall
->flags
= EDGE_FALSE_VALUE
;
8370 /* Update dominance info. */
8371 if (dom_info_available_p (CDI_DOMINATORS
))
8373 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, bb
);
8374 set_immediate_dominator (CDI_DOMINATORS
, fall
->dest
, bb
);
8377 /* Update loop info. */
8379 add_bb_to_loop (new_bb
, bb
->loop_father
);
8384 /* Build a ternary operation and gimplify it. Emit code before GSI.
8385 Return the gimple_val holding the result. */
8388 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8389 tree type
, tree a
, tree b
, tree c
)
8392 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8394 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8397 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8401 /* Build a binary operation and gimplify it. Emit code before GSI.
8402 Return the gimple_val holding the result. */
8405 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8406 tree type
, tree a
, tree b
)
8410 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8413 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8417 /* Build a unary operation and gimplify it. Emit code before GSI.
8418 Return the gimple_val holding the result. */
8421 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8426 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8429 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8435 /* Given a basic block B which ends with a conditional and has
8436 precisely two successors, determine which of the edges is taken if
8437 the conditional is true and which is taken if the conditional is
8438 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8441 extract_true_false_edges_from_block (basic_block b
,
8445 edge e
= EDGE_SUCC (b
, 0);
8447 if (e
->flags
& EDGE_TRUE_VALUE
)
8450 *false_edge
= EDGE_SUCC (b
, 1);
8455 *true_edge
= EDGE_SUCC (b
, 1);
8459 /* Emit return warnings. */
8463 const pass_data pass_data_warn_function_return
=
8465 GIMPLE_PASS
, /* type */
8466 "*warn_function_return", /* name */
8467 OPTGROUP_NONE
, /* optinfo_flags */
8468 TV_NONE
, /* tv_id */
8469 PROP_cfg
, /* properties_required */
8470 0, /* properties_provided */
8471 0, /* properties_destroyed */
8472 0, /* todo_flags_start */
8473 0, /* todo_flags_finish */
8476 class pass_warn_function_return
: public gimple_opt_pass
8479 pass_warn_function_return (gcc::context
*ctxt
)
8480 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
8483 /* opt_pass methods: */
8484 virtual unsigned int execute (function
*);
8486 }; // class pass_warn_function_return
8489 pass_warn_function_return::execute (function
*fun
)
8491 source_location location
;
8496 if (!targetm
.warn_func_return (fun
->decl
))
8499 /* If we have a path to EXIT, then we do return. */
8500 if (TREE_THIS_VOLATILE (fun
->decl
)
8501 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
8503 location
= UNKNOWN_LOCATION
;
8504 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8506 last
= last_stmt (e
->src
);
8507 if ((gimple_code (last
) == GIMPLE_RETURN
8508 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
8509 && (location
= gimple_location (last
)) != UNKNOWN_LOCATION
)
8512 if (location
== UNKNOWN_LOCATION
)
8513 location
= cfun
->function_end_locus
;
8514 warning_at (location
, 0, "%<noreturn%> function does return");
8517 /* If we see "return;" in some basic block, then we do reach the end
8518 without returning a value. */
8519 else if (warn_return_type
8520 && !TREE_NO_WARNING (fun
->decl
)
8521 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0
8522 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
8524 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8526 gimple last
= last_stmt (e
->src
);
8527 greturn
*return_stmt
= dyn_cast
<greturn
*> (last
);
8529 && gimple_return_retval (return_stmt
) == NULL
8530 && !gimple_no_warning_p (last
))
8532 location
= gimple_location (last
);
8533 if (location
== UNKNOWN_LOCATION
)
8534 location
= fun
->function_end_locus
;
8535 warning_at (location
, OPT_Wreturn_type
, "control reaches end of non-void function");
8536 TREE_NO_WARNING (fun
->decl
) = 1;
8547 make_pass_warn_function_return (gcc::context
*ctxt
)
8549 return new pass_warn_function_return (ctxt
);
8552 /* Walk a gimplified function and warn for functions whose return value is
8553 ignored and attribute((warn_unused_result)) is set. This is done before
8554 inlining, so we don't have to worry about that. */
8557 do_warn_unused_result (gimple_seq seq
)
8560 gimple_stmt_iterator i
;
8562 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
8564 gimple g
= gsi_stmt (i
);
8566 switch (gimple_code (g
))
8569 do_warn_unused_result (gimple_bind_body (as_a
<gbind
*>(g
)));
8572 do_warn_unused_result (gimple_try_eval (g
));
8573 do_warn_unused_result (gimple_try_cleanup (g
));
8576 do_warn_unused_result (gimple_catch_handler (
8577 as_a
<gcatch
*> (g
)));
8579 case GIMPLE_EH_FILTER
:
8580 do_warn_unused_result (gimple_eh_filter_failure (g
));
8584 if (gimple_call_lhs (g
))
8586 if (gimple_call_internal_p (g
))
8589 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8590 LHS. All calls whose value is ignored should be
8591 represented like this. Look for the attribute. */
8592 fdecl
= gimple_call_fndecl (g
);
8593 ftype
= gimple_call_fntype (g
);
8595 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
8597 location_t loc
= gimple_location (g
);
8600 warning_at (loc
, OPT_Wunused_result
,
8601 "ignoring return value of %qD, "
8602 "declared with attribute warn_unused_result",
8605 warning_at (loc
, OPT_Wunused_result
,
8606 "ignoring return value of function "
8607 "declared with attribute warn_unused_result");
8612 /* Not a container, not a call, or a call whose value is used. */
8620 const pass_data pass_data_warn_unused_result
=
8622 GIMPLE_PASS
, /* type */
8623 "*warn_unused_result", /* name */
8624 OPTGROUP_NONE
, /* optinfo_flags */
8625 TV_NONE
, /* tv_id */
8626 PROP_gimple_any
, /* properties_required */
8627 0, /* properties_provided */
8628 0, /* properties_destroyed */
8629 0, /* todo_flags_start */
8630 0, /* todo_flags_finish */
8633 class pass_warn_unused_result
: public gimple_opt_pass
8636 pass_warn_unused_result (gcc::context
*ctxt
)
8637 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
8640 /* opt_pass methods: */
8641 virtual bool gate (function
*) { return flag_warn_unused_result
; }
8642 virtual unsigned int execute (function
*)
8644 do_warn_unused_result (gimple_body (current_function_decl
));
8648 }; // class pass_warn_unused_result
8653 make_pass_warn_unused_result (gcc::context
*ctxt
)
8655 return new pass_warn_unused_result (ctxt
);
8658 /* IPA passes, compilation of earlier functions or inlining
8659 might have changed some properties, such as marked functions nothrow,
8660 pure, const or noreturn.
8661 Remove redundant edges and basic blocks, and create new ones if necessary.
8663 This pass can't be executed as stand alone pass from pass manager, because
8664 in between inlining and this fixup the verify_flow_info would fail. */
8667 execute_fixup_cfg (void)
8670 gimple_stmt_iterator gsi
;
8672 gcov_type count_scale
;
8677 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl
)->count
,
8678 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
);
8680 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
8681 cgraph_node::get (current_function_decl
)->count
;
8682 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
8683 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
,
8686 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
8687 e
->count
= apply_scale (e
->count
, count_scale
);
8689 FOR_EACH_BB_FN (bb
, cfun
)
8691 bb
->count
= apply_scale (bb
->count
, count_scale
);
8692 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
8694 gimple stmt
= gsi_stmt (gsi
);
8695 tree decl
= is_gimple_call (stmt
)
8696 ? gimple_call_fndecl (stmt
)
8700 int flags
= gimple_call_flags (stmt
);
8701 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
8703 if (gimple_purge_dead_abnormal_call_edges (bb
))
8704 todo
|= TODO_cleanup_cfg
;
8706 if (gimple_in_ssa_p (cfun
))
8708 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8713 if (flags
& ECF_NORETURN
8714 && fixup_noreturn_call (stmt
))
8715 todo
|= TODO_cleanup_cfg
;
8718 /* Remove stores to variables we marked write-only.
8719 Keep access when store has side effect, i.e. in case when source
8721 if (gimple_store_p (stmt
)
8722 && !gimple_has_side_effects (stmt
))
8724 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
8726 if (TREE_CODE (lhs
) == VAR_DECL
8727 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
8728 && varpool_node::get (lhs
)->writeonly
)
8730 unlink_stmt_vdef (stmt
);
8731 gsi_remove (&gsi
, true);
8732 release_defs (stmt
);
8733 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8737 /* For calls we can simply remove LHS when it is known
8738 to be write-only. */
8739 if (is_gimple_call (stmt
)
8740 && gimple_get_lhs (stmt
))
8742 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
8744 if (TREE_CODE (lhs
) == VAR_DECL
8745 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
8746 && varpool_node::get (lhs
)->writeonly
)
8748 gimple_call_set_lhs (stmt
, NULL
);
8750 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8754 if (maybe_clean_eh_stmt (stmt
)
8755 && gimple_purge_dead_eh_edges (bb
))
8756 todo
|= TODO_cleanup_cfg
;
8760 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8761 e
->count
= apply_scale (e
->count
, count_scale
);
8763 /* If we have a basic block with no successors that does not
8764 end with a control statement or a noreturn call end it with
8765 a call to __builtin_unreachable. This situation can occur
8766 when inlining a noreturn call that does in fact return. */
8767 if (EDGE_COUNT (bb
->succs
) == 0)
8769 gimple stmt
= last_stmt (bb
);
8771 || (!is_ctrl_stmt (stmt
)
8772 && (!is_gimple_call (stmt
)
8773 || (gimple_call_flags (stmt
) & ECF_NORETURN
) == 0)))
8775 if (stmt
&& is_gimple_call (stmt
))
8776 gimple_call_set_ctrl_altering (stmt
, false);
8777 stmt
= gimple_build_call
8778 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
), 0);
8779 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
8780 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
8784 if (count_scale
!= REG_BR_PROB_BASE
)
8785 compute_function_frequency ();
8788 && (todo
& TODO_cleanup_cfg
))
8789 loops_state_set (LOOPS_NEED_FIXUP
);
8796 const pass_data pass_data_fixup_cfg
=
8798 GIMPLE_PASS
, /* type */
8799 "fixup_cfg", /* name */
8800 OPTGROUP_NONE
, /* optinfo_flags */
8801 TV_NONE
, /* tv_id */
8802 PROP_cfg
, /* properties_required */
8803 0, /* properties_provided */
8804 0, /* properties_destroyed */
8805 0, /* todo_flags_start */
8806 0, /* todo_flags_finish */
8809 class pass_fixup_cfg
: public gimple_opt_pass
8812 pass_fixup_cfg (gcc::context
*ctxt
)
8813 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
8816 /* opt_pass methods: */
8817 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
8818 virtual unsigned int execute (function
*) { return execute_fixup_cfg (); }
8820 }; // class pass_fixup_cfg
8825 make_pass_fixup_cfg (gcc::context
*ctxt
)
8827 return new pass_fixup_cfg (ctxt
);
8830 /* Garbage collection support for edge_def. */
8832 extern void gt_ggc_mx (tree
&);
8833 extern void gt_ggc_mx (gimple
&);
8834 extern void gt_ggc_mx (rtx
&);
8835 extern void gt_ggc_mx (basic_block
&);
8838 gt_ggc_mx (rtx_insn
*& x
)
8841 gt_ggc_mx_rtx_def ((void *) x
);
8845 gt_ggc_mx (edge_def
*e
)
8847 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8849 gt_ggc_mx (e
->dest
);
8850 if (current_ir_type () == IR_GIMPLE
)
8851 gt_ggc_mx (e
->insns
.g
);
8853 gt_ggc_mx (e
->insns
.r
);
8857 /* PCH support for edge_def. */
8859 extern void gt_pch_nx (tree
&);
8860 extern void gt_pch_nx (gimple
&);
8861 extern void gt_pch_nx (rtx
&);
8862 extern void gt_pch_nx (basic_block
&);
8865 gt_pch_nx (rtx_insn
*& x
)
8868 gt_pch_nx_rtx_def ((void *) x
);
8872 gt_pch_nx (edge_def
*e
)
8874 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8876 gt_pch_nx (e
->dest
);
8877 if (current_ir_type () == IR_GIMPLE
)
8878 gt_pch_nx (e
->insns
.g
);
8880 gt_pch_nx (e
->insns
.r
);
8885 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
8887 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8888 op (&(e
->src
), cookie
);
8889 op (&(e
->dest
), cookie
);
8890 if (current_ir_type () == IR_GIMPLE
)
8891 op (&(e
->insns
.g
), cookie
);
8893 op (&(e
->insns
.r
), cookie
);
8894 op (&(block
), cookie
);