/* gcc/tree-cfg.c — Control flow functions for trees.  */
/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "output.h"
30 #include "flags.h"
31 #include "function.h"
32 #include "ggc.h"
33 #include "langhooks.h"
34 #include "tree-pretty-print.h"
35 #include "gimple-pretty-print.h"
36 #include "tree-flow.h"
37 #include "timevar.h"
38 #include "tree-dump.h"
39 #include "tree-pass.h"
40 #include "diagnostic-core.h"
41 #include "except.h"
42 #include "cfgloop.h"
43 #include "cfglayout.h"
44 #include "tree-ssa-propagate.h"
45 #include "value-prof.h"
46 #include "pointer-set.h"
47 #include "tree-inline.h"
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
52 /* Local declarations. */
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
57 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
58 which use a particular edge. The CASE_LABEL_EXPRs are chained together
59 via their TREE_CHAIN field, which we clear after we're done with the
60 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
62 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63 update the case vector in response to edge redirections.
65 Right now this table is set up and torn down at key points in the
66 compilation process. It would be nice if we could make the table
67 more persistent. The key is getting notification of changes to
68 the CFG (particularly edge removal, creation and redirection). */
70 static struct pointer_map_t *edge_to_cases;
72 /* If we record edge_to_cases, this bitmap will hold indexes
73 of basic blocks that end in a GIMPLE_SWITCH which we touched
74 due to edge manipulations. */
76 static bitmap touched_switch_bbs;
78 /* CFG statistics. */
79 struct cfg_stats_d
81 long num_merged_labels;
84 static struct cfg_stats_d cfg_stats;
86 /* Nonzero if we found a computed goto while building basic blocks. */
87 static bool found_computed_goto;
89 /* Hash table to store last discriminator assigned for each locus. */
90 struct locus_discrim_map
92 location_t locus;
93 int discriminator;
95 static htab_t discriminator_per_locus;
97 /* Basic blocks and flowgraphs. */
98 static void make_blocks (gimple_seq);
99 static void factor_computed_gotos (void);
101 /* Edges. */
102 static void make_edges (void);
103 static void make_cond_expr_edges (basic_block);
104 static void make_gimple_switch_edges (basic_block);
105 static void make_goto_expr_edges (basic_block);
106 static void make_gimple_asm_edges (basic_block);
107 static unsigned int locus_map_hash (const void *);
108 static int locus_map_eq (const void *, const void *);
109 static void assign_discriminator (location_t, basic_block);
110 static edge gimple_redirect_edge_and_branch (edge, basic_block);
111 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
112 static unsigned int split_critical_edges (void);
114 /* Various helpers. */
115 static inline bool stmt_starts_bb_p (gimple, gimple);
116 static int gimple_verify_flow_info (void);
117 static void gimple_make_forwarder_block (edge);
118 static void gimple_cfg2vcg (FILE *);
119 static gimple first_non_label_stmt (basic_block);
121 /* Flowgraph optimization and cleanup. */
122 static void gimple_merge_blocks (basic_block, basic_block);
123 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
124 static void remove_bb (basic_block);
125 static edge find_taken_edge_computed_goto (basic_block, tree);
126 static edge find_taken_edge_cond_expr (basic_block, tree);
127 static edge find_taken_edge_switch_expr (basic_block, tree);
128 static tree find_case_label_for_value (gimple, tree);
129 static void group_case_labels_stmt (gimple);
131 void
132 init_empty_tree_cfg_for_function (struct function *fn)
134 /* Initialize the basic block array. */
135 init_flow (fn);
136 profile_status_for_function (fn) = PROFILE_ABSENT;
137 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
138 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
139 basic_block_info_for_function (fn)
140 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
141 VEC_safe_grow_cleared (basic_block, gc,
142 basic_block_info_for_function (fn),
143 initial_cfg_capacity);
145 /* Build a mapping of labels to their associated blocks. */
146 label_to_block_map_for_function (fn)
147 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
148 VEC_safe_grow_cleared (basic_block, gc,
149 label_to_block_map_for_function (fn),
150 initial_cfg_capacity);
152 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
153 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
154 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
155 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
157 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
158 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
159 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
160 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
163 void
164 init_empty_tree_cfg (void)
166 init_empty_tree_cfg_for_function (cfun);
169 /*---------------------------------------------------------------------------
170 Create basic blocks
171 ---------------------------------------------------------------------------*/
173 /* Entry point to the CFG builder for trees. SEQ is the sequence of
174 statements to be added to the flowgraph. */
176 static void
177 build_gimple_cfg (gimple_seq seq)
179 /* Register specific gimple functions. */
180 gimple_register_cfg_hooks ();
182 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
184 init_empty_tree_cfg ();
186 found_computed_goto = 0;
187 make_blocks (seq);
189 /* Computed gotos are hell to deal with, especially if there are
190 lots of them with a large number of destinations. So we factor
191 them to a common computed goto location before we build the
192 edge list. After we convert back to normal form, we will un-factor
193 the computed gotos since factoring introduces an unwanted jump. */
194 if (found_computed_goto)
195 factor_computed_gotos ();
197 /* Make sure there is always at least one block, even if it's empty. */
198 if (n_basic_blocks == NUM_FIXED_BLOCKS)
199 create_empty_bb (ENTRY_BLOCK_PTR);
201 /* Adjust the size of the array. */
202 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
203 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
205 /* To speed up statement iterator walks, we first purge dead labels. */
206 cleanup_dead_labels ();
208 /* Group case nodes to reduce the number of edges.
209 We do this after cleaning up dead labels because otherwise we miss
210 a lot of obvious case merging opportunities. */
211 group_case_labels ();
213 /* Create the edges of the flowgraph. */
214 discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
215 free);
216 make_edges ();
217 cleanup_dead_labels ();
218 htab_delete (discriminator_per_locus);
220 /* Debugging dumps. */
222 /* Write the flowgraph to a VCG file. */
224 int local_dump_flags;
225 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
226 if (vcg_file)
228 gimple_cfg2vcg (vcg_file);
229 dump_end (TDI_vcg, vcg_file);
234 static unsigned int
235 execute_build_cfg (void)
237 gimple_seq body = gimple_body (current_function_decl);
239 build_gimple_cfg (body);
240 gimple_set_body (current_function_decl, NULL);
241 if (dump_file && (dump_flags & TDF_DETAILS))
243 fprintf (dump_file, "Scope blocks:\n");
244 dump_scope_blocks (dump_file, dump_flags);
246 return 0;
249 struct gimple_opt_pass pass_build_cfg =
252 GIMPLE_PASS,
253 "cfg", /* name */
254 NULL, /* gate */
255 execute_build_cfg, /* execute */
256 NULL, /* sub */
257 NULL, /* next */
258 0, /* static_pass_number */
259 TV_TREE_CFG, /* tv_id */
260 PROP_gimple_leh, /* properties_required */
261 PROP_cfg, /* properties_provided */
262 0, /* properties_destroyed */
263 0, /* todo_flags_start */
264 TODO_verify_stmts | TODO_cleanup_cfg /* todo_flags_finish */
269 /* Return true if T is a computed goto. */
271 static bool
272 computed_goto_p (gimple t)
274 return (gimple_code (t) == GIMPLE_GOTO
275 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
279 /* Search the CFG for any computed gotos. If found, factor them to a
280 common computed goto site. Also record the location of that site so
281 that we can un-factor the gotos after we have converted back to
282 normal form. */
284 static void
285 factor_computed_gotos (void)
287 basic_block bb;
288 tree factored_label_decl = NULL;
289 tree var = NULL;
290 gimple factored_computed_goto_label = NULL;
291 gimple factored_computed_goto = NULL;
293 /* We know there are one or more computed gotos in this function.
294 Examine the last statement in each basic block to see if the block
295 ends with a computed goto. */
297 FOR_EACH_BB (bb)
299 gimple_stmt_iterator gsi = gsi_last_bb (bb);
300 gimple last;
302 if (gsi_end_p (gsi))
303 continue;
305 last = gsi_stmt (gsi);
307 /* Ignore the computed goto we create when we factor the original
308 computed gotos. */
309 if (last == factored_computed_goto)
310 continue;
312 /* If the last statement is a computed goto, factor it. */
313 if (computed_goto_p (last))
315 gimple assignment;
317 /* The first time we find a computed goto we need to create
318 the factored goto block and the variable each original
319 computed goto will use for their goto destination. */
320 if (!factored_computed_goto)
322 basic_block new_bb = create_empty_bb (bb);
323 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
325 /* Create the destination of the factored goto. Each original
326 computed goto will put its desired destination into this
327 variable and jump to the label we create immediately
328 below. */
329 var = create_tmp_var (ptr_type_node, "gotovar");
331 /* Build a label for the new block which will contain the
332 factored computed goto. */
333 factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
334 factored_computed_goto_label
335 = gimple_build_label (factored_label_decl);
336 gsi_insert_after (&new_gsi, factored_computed_goto_label,
337 GSI_NEW_STMT);
339 /* Build our new computed goto. */
340 factored_computed_goto = gimple_build_goto (var);
341 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
344 /* Copy the original computed goto's destination into VAR. */
345 assignment = gimple_build_assign (var, gimple_goto_dest (last));
346 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
348 /* And re-vector the computed goto to the new destination. */
349 gimple_goto_set_dest (last, factored_label_decl);
355 /* Build a flowgraph for the sequence of stmts SEQ. */
357 static void
358 make_blocks (gimple_seq seq)
360 gimple_stmt_iterator i = gsi_start (seq);
361 gimple stmt = NULL;
362 bool start_new_block = true;
363 bool first_stmt_of_seq = true;
364 basic_block bb = ENTRY_BLOCK_PTR;
366 while (!gsi_end_p (i))
368 gimple prev_stmt;
370 prev_stmt = stmt;
371 stmt = gsi_stmt (i);
373 /* If the statement starts a new basic block or if we have determined
374 in a previous pass that we need to create a new block for STMT, do
375 so now. */
376 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
378 if (!first_stmt_of_seq)
379 seq = gsi_split_seq_before (&i);
380 bb = create_basic_block (seq, NULL, bb);
381 start_new_block = false;
384 /* Now add STMT to BB and create the subgraphs for special statement
385 codes. */
386 gimple_set_bb (stmt, bb);
388 if (computed_goto_p (stmt))
389 found_computed_goto = true;
391 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
392 next iteration. */
393 if (stmt_ends_bb_p (stmt))
395 /* If the stmt can make abnormal goto use a new temporary
396 for the assignment to the LHS. This makes sure the old value
397 of the LHS is available on the abnormal edge. Otherwise
398 we will end up with overlapping life-ranges for abnormal
399 SSA names. */
400 if (gimple_has_lhs (stmt)
401 && stmt_can_make_abnormal_goto (stmt)
402 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
404 tree lhs = gimple_get_lhs (stmt);
405 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
406 gimple s = gimple_build_assign (lhs, tmp);
407 gimple_set_location (s, gimple_location (stmt));
408 gimple_set_block (s, gimple_block (stmt));
409 gimple_set_lhs (stmt, tmp);
410 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
411 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
412 DECL_GIMPLE_REG_P (tmp) = 1;
413 gsi_insert_after (&i, s, GSI_SAME_STMT);
415 start_new_block = true;
418 gsi_next (&i);
419 first_stmt_of_seq = false;
424 /* Create and return a new empty basic block after bb AFTER. */
426 static basic_block
427 create_bb (void *h, void *e, basic_block after)
429 basic_block bb;
431 gcc_assert (!e);
433 /* Create and initialize a new basic block. Since alloc_block uses
434 GC allocation that clears memory to allocate a basic block, we do
435 not have to clear the newly allocated basic block here. */
436 bb = alloc_block ();
438 bb->index = last_basic_block;
439 bb->flags = BB_NEW;
440 bb->il.gimple = ggc_alloc_cleared_gimple_bb_info ();
441 set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());
443 /* Add the new block to the linked list of blocks. */
444 link_block (bb, after);
446 /* Grow the basic block array if needed. */
447 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
449 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
450 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
453 /* Add the newly created block to the array. */
454 SET_BASIC_BLOCK (last_basic_block, bb);
456 n_basic_blocks++;
457 last_basic_block++;
459 return bb;
463 /*---------------------------------------------------------------------------
464 Edge creation
465 ---------------------------------------------------------------------------*/
467 /* Fold COND_EXPR_COND of each COND_EXPR. */
469 void
470 fold_cond_expr_cond (void)
472 basic_block bb;
474 FOR_EACH_BB (bb)
476 gimple stmt = last_stmt (bb);
478 if (stmt && gimple_code (stmt) == GIMPLE_COND)
480 location_t loc = gimple_location (stmt);
481 tree cond;
482 bool zerop, onep;
484 fold_defer_overflow_warnings ();
485 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
486 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
487 if (cond)
489 zerop = integer_zerop (cond);
490 onep = integer_onep (cond);
492 else
493 zerop = onep = false;
495 fold_undefer_overflow_warnings (zerop || onep,
496 stmt,
497 WARN_STRICT_OVERFLOW_CONDITIONAL);
498 if (zerop)
499 gimple_cond_make_false (stmt);
500 else if (onep)
501 gimple_cond_make_true (stmt);
506 /* Join all the blocks in the flowgraph. */
508 static void
509 make_edges (void)
511 basic_block bb;
512 struct omp_region *cur_region = NULL;
514 /* Create an edge from entry to the first block with executable
515 statements in it. */
516 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
518 /* Traverse the basic block array placing edges. */
519 FOR_EACH_BB (bb)
521 gimple last = last_stmt (bb);
522 bool fallthru;
524 if (last)
526 enum gimple_code code = gimple_code (last);
527 switch (code)
529 case GIMPLE_GOTO:
530 make_goto_expr_edges (bb);
531 fallthru = false;
532 break;
533 case GIMPLE_RETURN:
534 make_edge (bb, EXIT_BLOCK_PTR, 0);
535 fallthru = false;
536 break;
537 case GIMPLE_COND:
538 make_cond_expr_edges (bb);
539 fallthru = false;
540 break;
541 case GIMPLE_SWITCH:
542 make_gimple_switch_edges (bb);
543 fallthru = false;
544 break;
545 case GIMPLE_RESX:
546 make_eh_edges (last);
547 fallthru = false;
548 break;
549 case GIMPLE_EH_DISPATCH:
550 fallthru = make_eh_dispatch_edges (last);
551 break;
553 case GIMPLE_CALL:
554 /* If this function receives a nonlocal goto, then we need to
555 make edges from this call site to all the nonlocal goto
556 handlers. */
557 if (stmt_can_make_abnormal_goto (last))
558 make_abnormal_goto_edges (bb, true);
560 /* If this statement has reachable exception handlers, then
561 create abnormal edges to them. */
562 make_eh_edges (last);
564 /* BUILTIN_RETURN is really a return statement. */
565 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
566 make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
567 /* Some calls are known not to return. */
568 else
569 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
570 break;
572 case GIMPLE_ASSIGN:
573 /* A GIMPLE_ASSIGN may throw internally and thus be considered
574 control-altering. */
575 if (is_ctrl_altering_stmt (last))
576 make_eh_edges (last);
577 fallthru = true;
578 break;
580 case GIMPLE_ASM:
581 make_gimple_asm_edges (bb);
582 fallthru = true;
583 break;
585 case GIMPLE_OMP_PARALLEL:
586 case GIMPLE_OMP_TASK:
587 case GIMPLE_OMP_FOR:
588 case GIMPLE_OMP_SINGLE:
589 case GIMPLE_OMP_MASTER:
590 case GIMPLE_OMP_ORDERED:
591 case GIMPLE_OMP_CRITICAL:
592 case GIMPLE_OMP_SECTION:
593 cur_region = new_omp_region (bb, code, cur_region);
594 fallthru = true;
595 break;
597 case GIMPLE_OMP_SECTIONS:
598 cur_region = new_omp_region (bb, code, cur_region);
599 fallthru = true;
600 break;
602 case GIMPLE_OMP_SECTIONS_SWITCH:
603 fallthru = false;
604 break;
606 case GIMPLE_OMP_ATOMIC_LOAD:
607 case GIMPLE_OMP_ATOMIC_STORE:
608 fallthru = true;
609 break;
611 case GIMPLE_OMP_RETURN:
612 /* In the case of a GIMPLE_OMP_SECTION, the edge will go
613 somewhere other than the next block. This will be
614 created later. */
615 cur_region->exit = bb;
616 fallthru = cur_region->type != GIMPLE_OMP_SECTION;
617 cur_region = cur_region->outer;
618 break;
620 case GIMPLE_OMP_CONTINUE:
621 cur_region->cont = bb;
622 switch (cur_region->type)
624 case GIMPLE_OMP_FOR:
625 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
626 succs edges as abnormal to prevent splitting
627 them. */
628 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
629 /* Make the loopback edge. */
630 make_edge (bb, single_succ (cur_region->entry),
631 EDGE_ABNORMAL);
633 /* Create an edge from GIMPLE_OMP_FOR to exit, which
634 corresponds to the case that the body of the loop
635 is not executed at all. */
636 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
637 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
638 fallthru = false;
639 break;
641 case GIMPLE_OMP_SECTIONS:
642 /* Wire up the edges into and out of the nested sections. */
644 basic_block switch_bb = single_succ (cur_region->entry);
646 struct omp_region *i;
647 for (i = cur_region->inner; i ; i = i->next)
649 gcc_assert (i->type == GIMPLE_OMP_SECTION);
650 make_edge (switch_bb, i->entry, 0);
651 make_edge (i->exit, bb, EDGE_FALLTHRU);
654 /* Make the loopback edge to the block with
655 GIMPLE_OMP_SECTIONS_SWITCH. */
656 make_edge (bb, switch_bb, 0);
658 /* Make the edge from the switch to exit. */
659 make_edge (switch_bb, bb->next_bb, 0);
660 fallthru = false;
662 break;
664 default:
665 gcc_unreachable ();
667 break;
669 default:
670 gcc_assert (!stmt_ends_bb_p (last));
671 fallthru = true;
674 else
675 fallthru = true;
677 if (fallthru)
679 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
680 if (last)
681 assign_discriminator (gimple_location (last), bb->next_bb);
685 if (root_omp_region)
686 free_omp_regions ();
688 /* Fold COND_EXPR_COND of each COND_EXPR. */
689 fold_cond_expr_cond ();
692 /* Trivial hash function for a location_t. ITEM is a pointer to
693 a hash table entry that maps a location_t to a discriminator. */
695 static unsigned int
696 locus_map_hash (const void *item)
698 return ((const struct locus_discrim_map *) item)->locus;
701 /* Equality function for the locus-to-discriminator map. VA and VB
702 point to the two hash table entries to compare. */
704 static int
705 locus_map_eq (const void *va, const void *vb)
707 const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
708 const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
709 return a->locus == b->locus;
712 /* Find the next available discriminator value for LOCUS. The
713 discriminator distinguishes among several basic blocks that
714 share a common locus, allowing for more accurate sample-based
715 profiling. */
717 static int
718 next_discriminator_for_locus (location_t locus)
720 struct locus_discrim_map item;
721 struct locus_discrim_map **slot;
723 item.locus = locus;
724 item.discriminator = 0;
725 slot = (struct locus_discrim_map **)
726 htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
727 (hashval_t) locus, INSERT);
728 gcc_assert (slot);
729 if (*slot == HTAB_EMPTY_ENTRY)
731 *slot = XNEW (struct locus_discrim_map);
732 gcc_assert (*slot);
733 (*slot)->locus = locus;
734 (*slot)->discriminator = 0;
736 (*slot)->discriminator++;
737 return (*slot)->discriminator;
740 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
742 static bool
743 same_line_p (location_t locus1, location_t locus2)
745 expanded_location from, to;
747 if (locus1 == locus2)
748 return true;
750 from = expand_location (locus1);
751 to = expand_location (locus2);
753 if (from.line != to.line)
754 return false;
755 if (from.file == to.file)
756 return true;
757 return (from.file != NULL
758 && to.file != NULL
759 && filename_cmp (from.file, to.file) == 0);
762 /* Assign a unique discriminator value to block BB if it begins at the same
763 LOCUS as its predecessor block. */
765 static void
766 assign_discriminator (location_t locus, basic_block bb)
768 gimple first_in_to_bb, last_in_to_bb;
770 if (locus == 0 || bb->discriminator != 0)
771 return;
773 first_in_to_bb = first_non_label_stmt (bb);
774 last_in_to_bb = last_stmt (bb);
775 if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
776 || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
777 bb->discriminator = next_discriminator_for_locus (locus);
780 /* Create the edges for a GIMPLE_COND starting at block BB. */
782 static void
783 make_cond_expr_edges (basic_block bb)
785 gimple entry = last_stmt (bb);
786 gimple then_stmt, else_stmt;
787 basic_block then_bb, else_bb;
788 tree then_label, else_label;
789 edge e;
790 location_t entry_locus;
792 gcc_assert (entry);
793 gcc_assert (gimple_code (entry) == GIMPLE_COND);
795 entry_locus = gimple_location (entry);
797 /* Entry basic blocks for each component. */
798 then_label = gimple_cond_true_label (entry);
799 else_label = gimple_cond_false_label (entry);
800 then_bb = label_to_block (then_label);
801 else_bb = label_to_block (else_label);
802 then_stmt = first_stmt (then_bb);
803 else_stmt = first_stmt (else_bb);
805 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
806 assign_discriminator (entry_locus, then_bb);
807 e->goto_locus = gimple_location (then_stmt);
808 if (e->goto_locus)
809 e->goto_block = gimple_block (then_stmt);
810 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
811 if (e)
813 assign_discriminator (entry_locus, else_bb);
814 e->goto_locus = gimple_location (else_stmt);
815 if (e->goto_locus)
816 e->goto_block = gimple_block (else_stmt);
819 /* We do not need the labels anymore. */
820 gimple_cond_set_true_label (entry, NULL_TREE);
821 gimple_cond_set_false_label (entry, NULL_TREE);
825 /* Called for each element in the hash table (P) as we delete the
826 edge to cases hash table.
828 Clear all the TREE_CHAINs to prevent problems with copying of
829 SWITCH_EXPRs and structure sharing rules, then free the hash table
830 element. */
832 static bool
833 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
834 void *data ATTRIBUTE_UNUSED)
836 tree t, next;
838 for (t = (tree) *value; t; t = next)
840 next = CASE_CHAIN (t);
841 CASE_CHAIN (t) = NULL;
844 *value = NULL;
845 return true;
848 /* Start recording information mapping edges to case labels. */
850 void
851 start_recording_case_labels (void)
853 gcc_assert (edge_to_cases == NULL);
854 edge_to_cases = pointer_map_create ();
855 touched_switch_bbs = BITMAP_ALLOC (NULL);
858 /* Return nonzero if we are recording information for case labels. */
860 static bool
861 recording_case_labels_p (void)
863 return (edge_to_cases != NULL);
866 /* Stop recording information mapping edges to case labels and
867 remove any information we have recorded. */
868 void
869 end_recording_case_labels (void)
871 bitmap_iterator bi;
872 unsigned i;
873 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
874 pointer_map_destroy (edge_to_cases);
875 edge_to_cases = NULL;
876 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
878 basic_block bb = BASIC_BLOCK (i);
879 if (bb)
881 gimple stmt = last_stmt (bb);
882 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
883 group_case_labels_stmt (stmt);
886 BITMAP_FREE (touched_switch_bbs);
889 /* If we are inside a {start,end}_recording_cases block, then return
890 a chain of CASE_LABEL_EXPRs from T which reference E.
892 Otherwise return NULL. */
894 static tree
895 get_cases_for_edge (edge e, gimple t)
897 void **slot;
898 size_t i, n;
900 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
901 chains available. Return NULL so the caller can detect this case. */
902 if (!recording_case_labels_p ())
903 return NULL;
905 slot = pointer_map_contains (edge_to_cases, e);
906 if (slot)
907 return (tree) *slot;
909 /* If we did not find E in the hash table, then this must be the first
910 time we have been queried for information about E & T. Add all the
911 elements from T to the hash table then perform the query again. */
913 n = gimple_switch_num_labels (t);
914 for (i = 0; i < n; i++)
916 tree elt = gimple_switch_label (t, i);
917 tree lab = CASE_LABEL (elt);
918 basic_block label_bb = label_to_block (lab);
919 edge this_edge = find_edge (e->src, label_bb);
921 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
922 a new chain. */
923 slot = pointer_map_insert (edge_to_cases, this_edge);
924 CASE_CHAIN (elt) = (tree) *slot;
925 *slot = elt;
928 return (tree) *pointer_map_contains (edge_to_cases, e);
931 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
933 static void
934 make_gimple_switch_edges (basic_block bb)
936 gimple entry = last_stmt (bb);
937 location_t entry_locus;
938 size_t i, n;
940 entry_locus = gimple_location (entry);
942 n = gimple_switch_num_labels (entry);
944 for (i = 0; i < n; ++i)
946 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
947 basic_block label_bb = label_to_block (lab);
948 make_edge (bb, label_bb, 0);
949 assign_discriminator (entry_locus, label_bb);
954 /* Return the basic block holding label DEST. */
956 basic_block
957 label_to_block_fn (struct function *ifun, tree dest)
959 int uid = LABEL_DECL_UID (dest);
961 /* We would die hard when faced by an undefined label. Emit a label to
962 the very first basic block. This will hopefully make even the dataflow
963 and undefined variable warnings quite right. */
964 if (seen_error () && uid < 0)
966 gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
967 gimple stmt;
969 stmt = gimple_build_label (dest);
970 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
971 uid = LABEL_DECL_UID (dest);
973 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
974 <= (unsigned int) uid)
975 return NULL;
976 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
979 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
980 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
982 void
983 make_abnormal_goto_edges (basic_block bb, bool for_call)
985 basic_block target_bb;
986 gimple_stmt_iterator gsi;
988 FOR_EACH_BB (target_bb)
989 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
991 gimple label_stmt = gsi_stmt (gsi);
992 tree target;
994 if (gimple_code (label_stmt) != GIMPLE_LABEL)
995 break;
997 target = gimple_label_label (label_stmt);
999 /* Make an edge to every label block that has been marked as a
1000 potential target for a computed goto or a non-local goto. */
1001 if ((FORCED_LABEL (target) && !for_call)
1002 || (DECL_NONLOCAL (target) && for_call))
1004 make_edge (bb, target_bb, EDGE_ABNORMAL);
1005 break;
1010 /* Create edges for a goto statement at block BB. */
1012 static void
1013 make_goto_expr_edges (basic_block bb)
1015 gimple_stmt_iterator last = gsi_last_bb (bb);
1016 gimple goto_t = gsi_stmt (last);
1018 /* A simple GOTO creates normal edges. */
1019 if (simple_goto_p (goto_t))
1021 tree dest = gimple_goto_dest (goto_t);
1022 basic_block label_bb = label_to_block (dest);
1023 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1024 e->goto_locus = gimple_location (goto_t);
1025 assign_discriminator (e->goto_locus, label_bb);
1026 if (e->goto_locus)
1027 e->goto_block = gimple_block (goto_t);
1028 gsi_remove (&last, true);
1029 return;
1032 /* A computed GOTO creates abnormal edges. */
1033 make_abnormal_goto_edges (bb, false);
1036 /* Create edges for an asm statement with labels at block BB. */
1038 static void
1039 make_gimple_asm_edges (basic_block bb)
1041 gimple stmt = last_stmt (bb);
1042 location_t stmt_loc = gimple_location (stmt);
1043 int i, n = gimple_asm_nlabels (stmt);
1045 for (i = 0; i < n; ++i)
1047 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1048 basic_block label_bb = label_to_block (label);
1049 make_edge (bb, label_bb, 0);
1050 assign_discriminator (stmt_loc, label_bb);
1054 /*---------------------------------------------------------------------------
1055 Flowgraph analysis
1056 ---------------------------------------------------------------------------*/
1058 /* Cleanup useless labels in basic blocks. This is something we wish
1059 to do early because it allows us to group case labels before creating
1060 the edges for the CFG, and it speeds up block statement iterators in
1061 all passes later on.
1062 We rerun this pass after CFG is created, to get rid of the labels that
1063 are no longer referenced. After then we do not run it any more, since
1064 (almost) no new labels should be created. */
1066 /* A map from basic block index to the leading label of that block. */
1067 static struct label_record
1069 /* The label. */
1070 tree label;
1072 /* True if the label is referenced from somewhere. */
1073 bool used;
1074 } *label_for_bb;
1076 /* Given LABEL return the first label in the same basic block. */
1078 static tree
1079 main_block_label (tree label)
1081 basic_block bb = label_to_block (label);
1082 tree main_label = label_for_bb[bb->index].label;
1084 /* label_to_block possibly inserted undefined label into the chain. */
1085 if (!main_label)
1087 label_for_bb[bb->index].label = label;
1088 main_label = label;
1091 label_for_bb[bb->index].used = true;
1092 return main_label;
/* Clean up redundant labels within the exception tree: redirect every
   label referenced from landing pads and EH regions to the canonical
   label of its basic block (via main_block_label).  No-op when the
   function has no EH data.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Landing pads: if the post-landing-pad label is replaced, transfer
     the landing-pad number to the replacement label.  */
  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Region tree: try-handlers and allowed-exceptions regions carry
     labels; cleanups and must-not-throw regions do not.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  /* Indexed by bb->index; zero-initialized so .label starts NULL_TREE
     and .used starts false.  Freed at the end of this function.  */
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  /* Labels can only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    tree true_label = gimple_cond_true_label (stmt);
	    tree false_label = gimple_cond_false_label (stmt);

	    if (true_label)
	      gimple_cond_set_true_label (stmt, main_block_label (true_label));
	    if (false_label)
	      gimple_cond_set_false_label (stmt, main_block_label (false_label));
	    break;
	  }

	case GIMPLE_SWITCH:
	  {
	    size_t i, n = gimple_switch_num_labels (stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (stmt, i);
		tree label = main_block_label (CASE_LABEL (case_label));
		CASE_LABEL (case_label) = label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    int i, n = gimple_asm_nlabels (stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      tree new_dest = main_block_label (gimple_goto_dest (stmt));
	      gimple_goto_set_dest (stmt, new_dest);
	    }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  tree default_case = NULL_TREE;
  tree default_label = NULL_TREE;
  bool has_default;

  /* The default label is always the first case in a switch
     statement after gimplification if it was not optimized
     away */
  if (!CASE_LOW (gimple_switch_default_label (stmt))
      && !CASE_HIGH (gimple_switch_default_label (stmt)))
    {
      default_case = gimple_switch_default_label (stmt);
      default_label = CASE_LABEL (default_case);
      has_default = true;
    }
  else
    has_default = false;

  /* Look for possible opportunities to merge cases.  Start past the
     default label (slot 0) when it is present.  */
  if (has_default)
    i = 1;
  else
    i = 0;
  while (i < old_size)
    {
      tree base_case, base_label, base_high;
      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_label = CASE_LABEL (base_case);

      /* Discard cases that have the same destination as the
	 default case.  Dropped slots are NULLed out here and
	 compacted away below.  */
      if (base_label == default_label)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  tree merge_label = CASE_LABEL (merge_case);
	  /* BASE_HIGH + 1, computed in double_int to avoid overflow.  */
	  double_int bhp1 = double_int_add (tree_to_double_int (base_high),
					    double_int_one);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_label == base_label
	      && double_int_equal_p (tree_to_double_int (CASE_LOW (merge_case)),
				     bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1395 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1396 and scan the sorted vector of cases. Combine the ones jumping to the
1397 same label. */
1399 void
1400 group_case_labels (void)
1402 basic_block bb;
1404 FOR_EACH_BB (bb)
1406 gimple stmt = last_stmt (bb);
1407 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1408 group_case_labels_stmt (stmt);
/* Checks whether we can merge block B into block A.  Returns true only
   when A's single fallthru successor is B, B has a single predecessor,
   A does not end the block abnormally, B's labels are all removable,
   B is not a loop latch, B's PHIs are eliminable, and (at -O0) merging
   would not lose the goto_locus of the A->B edge.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      lab = gimple_label_label (stmt);

      /* Do not remove user labels.  */
      if (!DECL_ARTIFICIAL (lab))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis)
      && name_mappings_registered_p ())
    return false;

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      /* OK if either the last real stmt of A or the first real stmt of
	 B already carries the edge's location.  */
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
1494 /* Return true if the var whose chain of uses starts at PTR has no
1495 nondebug uses. */
1496 bool
1497 has_zero_uses_1 (const ssa_use_operand_t *head)
1499 const ssa_use_operand_t *ptr;
1501 for (ptr = head->next; ptr != head; ptr = ptr->next)
1502 if (!is_gimple_debug (USE_STMT (ptr)))
1503 return false;
1505 return true;
1508 /* Return true if the var whose chain of uses starts at PTR has a
1509 single nondebug use. Set USE_P and STMT to that single nondebug
1510 use, if so, or to NULL otherwise. */
1511 bool
1512 single_imm_use_1 (const ssa_use_operand_t *head,
1513 use_operand_p *use_p, gimple *stmt)
1515 ssa_use_operand_t *ptr, *single_use = 0;
1517 for (ptr = head->next; ptr != head; ptr = ptr->next)
1518 if (!is_gimple_debug (USE_STMT (ptr)))
1520 if (single_use)
1522 single_use = NULL;
1523 break;
1525 single_use = ptr;
1528 if (use_p)
1529 *use_p = single_use;
1531 if (stmt)
1532 *stmt = single_use ? single_use->loc.stmt : NULL;
1534 return !!single_use;
/* Replaces all uses of NAME by VAL.  After this NAME must have no
   remaining uses (asserted below).  Non-PHI use statements are folded
   in place and updated; loop structures referencing NAME are also
   updated.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_assert (!is_gimple_reg (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  size_t i;

	  fold_stmt_inplace (stmt);
	  if (cfgcleanup_altered_bbs && !is_gimple_debug (stmt))
	    bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

	  /* FIXME. This should go in update_stmt.  */
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      tree op = gimple_op (stmt, i);
	      /* Operands may be empty here.  For example, the labels
		 of a GIMPLE_COND are nulled out following the creation
		 of the corresponding CFG edges.  */
	      if (op && TREE_CODE (op) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (op);
	    }

	  maybe_clean_or_replace_eh_stmt (stmt, stmt);
	  update_stmt (stmt);
	}
    }

  gcc_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  The caller must have verified the merge
   is legal (gimple_can_merge_blocks_p): A's single fallthru successor
   is B and B has a single predecessor.  B's PHIs are eliminated, B's
   labels are removed or migrated, and B's statement sequence is
   appended to A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;
  gimple_seq phis = phi_nodes (b);

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
			      || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && is_gimple_reg (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (is_gimple_reg (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (!is_gimple_reg (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }

	  /* A removed landing-pad label loses its landing pad.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1727 /* Return the one of two successors of BB that is not reachable by a
1728 complex edge, if there is one. Else, return BB. We use
1729 this in optimizations that use post-dominators for their heuristics,
1730 to catch the cases in C++ where function calls are involved. */
1732 basic_block
1733 single_noncomplex_succ (basic_block bb)
1735 edge e0, e1;
1736 if (EDGE_COUNT (bb->succs) != 2)
1737 return bb;
1739 e0 = EDGE_SUCC (bb, 0);
1740 e1 = EDGE_SUCC (bb, 1);
1741 if (e0->flags & EDGE_COMPLEX)
1742 return e1->dest;
1743 if (e1->flags & EDGE_COMPLEX)
1744 return e0->dest;
1746 return bb;
1749 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1751 void
1752 notice_special_calls (gimple call)
1754 int flags = gimple_call_flags (call);
1756 if (flags & ECF_MAY_BE_ALLOCA)
1757 cfun->calls_alloca = true;
1758 if (flags & ECF_RETURNS_TWICE)
1759 cfun->calls_setjmp = true;
1763 /* Clear flags set by notice_special_calls. Used by dead code removal
1764 to update the flags. */
1766 void
1767 clear_special_calls (void)
1769 cfun->calls_alloca = false;
1770 cfun->calls_setjmp = false;
/* Remove PHI nodes associated with basic block BB and all edges out of BB.
   BB must already be unreachable.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  Removing EDGE_SUCC (bb, 0)
     repeatedly is safe: remove_edge shrinks the successor vector.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}
/* Remove statements of basic block BB, then its PHIs and outgoing
   edges.  Forced/non-local labels are preserved by moving them into the
   previous block.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (bb, dump_file, 0);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  if (gimple_code (stmt) == GIMPLE_LABEL
	      && (FORCED_LABEL (gimple_label_label (stmt))
		  || DECL_NONLOCAL (gimple_label_label (stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (stmt)) = 1;
		}

	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  /* gsi_remove left I pointing at the next stmt; step back to
	     continue the backwards walk (or restart from the new last
	     stmt when we removed the old last one).  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple = NULL;
}
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  /* Only a constant predicate determines the edge statically.  */
  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
	 not a label then we can not construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}
1914 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1915 statement, determine which of the outgoing edges will be taken out of the
1916 block. Return NULL if either edge may be taken. */
1918 static edge
1919 find_taken_edge_computed_goto (basic_block bb, tree val)
1921 basic_block dest;
1922 edge e = NULL;
1924 dest = label_to_block (val);
1925 if (dest)
1927 e = find_edge (bb, dest);
1928 gcc_assert (e != NULL);
1931 return e;
1934 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1935 statement, determine which of the two edges will be taken out of the
1936 block. Return NULL if either edge may be taken. */
1938 static edge
1939 find_taken_edge_cond_expr (basic_block bb, tree val)
1941 edge true_edge, false_edge;
1943 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1945 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1946 return (integer_zerop (val) ? false_edge : true_edge);
1949 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1950 statement, determine which edge will be taken out of the block. Return
1951 NULL if any edge may be taken. */
1953 static edge
1954 find_taken_edge_switch_expr (basic_block bb, tree val)
1956 basic_block dest_bb;
1957 edge e;
1958 gimple switch_stmt;
1959 tree taken_case;
1961 switch_stmt = last_stmt (bb);
1962 taken_case = find_case_label_for_value (switch_stmt, val);
1963 dest_bb = label_to_block (CASE_LABEL (taken_case));
1965 e = find_edge (bb, dest_bb);
1966 gcc_assert (e);
1967 return e;
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  Falls
   back to the default case when no label covers VAL.  */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Invariant: the matching label (if any) lies in [low, high).  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
2013 /* Dump a basic block on stderr. */
2015 void
2016 gimple_debug_bb (basic_block bb)
2018 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2022 /* Dump basic block with index N on stderr. */
2024 basic_block
2025 gimple_debug_bb_n (int n)
2027 gimple_debug_bb (BASIC_BLOCK (n));
2028 return BASIC_BLOCK (n);
/* Dump the CFG on stderr.  FLAGS are the same used by the tree dumping
   functions (see TDF_* in tree-pass.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  TDF_DETAILS adds a header, summary counts, and a brief CFG
   dump; TDF_STATS adds memory statistics.  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks, n_edges, last_basic_block);

      brief_dump_cfg (file);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
/* Dump CFG statistics on FILE: memory used by blocks and edges for the
   current function, plus a global running maximum of merged labels.  */

void
dump_cfg_stats (FILE *file)
{
  /* Persists across calls so the "Max so far" figure is global.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
	   SCALE (size), LABEL (size));

  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
/* Dump the flowgraph to a .vcg FILE (VCG graph-visualization format):
   one node per block labelled with its first/last statement codes and
   line numbers, plus edges (fake edges drawn dotted).  */

static void
gimple_cfg2vcg (FILE *file)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Write the file header.  */
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");

  /* Write blocks and edges.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
	       e->dest->index);

      if (e->flags & EDGE_FAKE)
	fprintf (file, " linestyle: dotted priority: 10");
      else
	fprintf (file, " linestyle: solid priority: 100");

      fprintf (file, " }\n");
    }
  fputc ('\n', file);

  FOR_EACH_BB (bb)
    {
      enum gimple_code head_code, end_code;
      const char *head_name, *end_name;
      int head_line = 0;
      int end_line = 0;
      gimple first = first_stmt (bb);
      gimple last = last_stmt (bb);

      if (first)
	{
	  head_code = gimple_code (first);
	  head_name = gimple_code_name[head_code];
	  head_line = get_lineno (first);
	}
      else
	head_name = "no-statement";

      if (last)
	{
	  end_code = gimple_code (last);
	  end_name = gimple_code_name[end_code];
	  end_line = get_lineno (last);
	}
      else
	end_name = "no-statement";

      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
	       bb->index, bb->index, head_name, head_line, end_name,
	       end_line);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest == EXIT_BLOCK_PTR)
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
	  else
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);

	  if (e->flags & EDGE_FAKE)
	    fprintf (file, " priority: 10 linestyle: dotted");
	  else
	    fprintf (file, " priority: 100 linestyle: solid");

	  fprintf (file, " }\n");
	}

      if (bb->next_bb != EXIT_BLOCK_PTR)
	fputc ('\n', file);
    }

  fputs ("}\n\n", file);
}
2217 /*---------------------------------------------------------------------------
2218 Miscellaneous helpers
2219 ---------------------------------------------------------------------------*/
2221 /* Return true if T represents a stmt that always transfers control. */
2223 bool
2224 is_ctrl_stmt (gimple t)
2226 switch (gimple_code (t))
2228 case GIMPLE_COND:
2229 case GIMPLE_SWITCH:
2230 case GIMPLE_GOTO:
2231 case GIMPLE_RETURN:
2232 case GIMPLE_RESX:
2233 return true;
2234 default:
2235 return false;
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  Note the GIMPLE_CALL
   and GIMPLE_ASM cases fall through to the final can-throw check.  */

bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      {
	int flags = gimple_call_flags (t);

	/* A non-pure/const call alters flow control if the current
	   function has nonlocal labels.  */
	if (!(flags & (ECF_CONST | ECF_PURE | ECF_LEAF))
	    && cfun->has_nonlocal_label)
	  return true;

	/* A call also alters control flow if it does not return.  */
	if (flags & ECF_NORETURN)
	  return true;

	/* BUILT_IN_RETURN call is same as return statement.  */
	if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
	  return true;
      }
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm with transfer-of-control labels alters control flow.  */
      if (gimple_asm_nlabels (t) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
2294 /* Return true if T is a simple local goto. */
2296 bool
2297 simple_goto_p (gimple t)
2299 return (gimple_code (t) == GIMPLE_GOTO
2300 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2304 /* Return true if T can make an abnormal transfer of control flow.
2305 Transfers of control flow associated with EH are excluded. */
2307 bool
2308 stmt_can_make_abnormal_goto (gimple t)
2310 if (computed_goto_p (t))
2311 return true;
2312 if (is_gimple_call (t))
2313 return (gimple_has_side_effects (t) && cfun->has_nonlocal_label
2314 && !(gimple_call_flags (t) & ECF_LEAF));
2315 return false;
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  Also counts merged labels in cfg_stats as a side effect.  */

static inline bool
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (stmt))
	  || FORCED_LABEL (gimple_label_label (stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
	    return true;

	  /* This label is merged into the previous label's block.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }

  return false;
}
2358 /* Return true if T should end a basic block. */
2360 bool
2361 stmt_ends_bb_p (gimple t)
2363 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
/* Remove block annotations and other data structures.  */

void
delete_tree_cfg_annotations (void)
{
  /* Drop the label-to-block map; the storage is garbage collected
     once it is no longer referenced.  */
  label_to_block_map = NULL;
}
2375 /* Return the first statement in basic block BB. */
2377 gimple
2378 first_stmt (basic_block bb)
2380 gimple_stmt_iterator i = gsi_start_bb (bb);
2381 gimple stmt = NULL;
2383 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2385 gsi_next (&i);
2386 stmt = NULL;
2388 return stmt;
2391 /* Return the first non-label statement in basic block BB. */
2393 static gimple
2394 first_non_label_stmt (basic_block bb)
2396 gimple_stmt_iterator i = gsi_start_bb (bb);
2397 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2398 gsi_next (&i);
2399 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2402 /* Return the last statement in basic block BB. */
2404 gimple
2405 last_stmt (basic_block bb)
2407 gimple_stmt_iterator i = gsi_last_bb (bb);
2408 gimple stmt = NULL;
2410 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2412 gsi_prev (&i);
2413 stmt = NULL;
2415 return stmt;
2418 /* Return the last statement of an otherwise empty block. Return NULL
2419 if the block is totally empty, or if it contains more than one
2420 statement. */
2422 gimple
2423 last_and_only_stmt (basic_block bb)
2425 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2426 gimple last, prev;
2428 if (gsi_end_p (i))
2429 return NULL;
2431 last = gsi_stmt (i);
2432 gsi_prev_nondebug (&i);
2433 if (gsi_end_p (i))
2434 return last;
2436 /* Empty statements should no longer appear in the instruction stream.
2437 Everything that might have appeared before should be deleted by
2438 remove_useless_stmts, and the optimizers should just gsi_remove
2439 instead of smashing with build_empty_stmt.
2441 Thus the only thing that should appear here in a block containing
2442 one executable statement is a label. */
2443 prev = gsi_stmt (i);
2444 if (gimple_code (prev) == GIMPLE_LABEL)
2445 return last;
2446 else
2447 return NULL;
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator phis;

  /* The PHI arguments for OLD_EDGE were stashed away in the edge's
     var map when the edge was redirected; nothing to do if no map
     was recorded.  */
  v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the saved (result, def) pairs and the PHI nodes of the new
     destination in lock-step; the pairs were queued in PHI order, so
     both sequences must stay in sync.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gimple phi = gsi_stmt (phis);
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      /* The queued result must correspond to this PHI node.  */
      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* The queued map has been consumed; release it.  */
  redirect_edge_var_map_clear (old_edge);
}
2480 /* Returns the basic block after which the new basic block created
2481 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2482 near its "logical" location. This is of most help to humans looking
2483 at debugging dumps. */
2485 static basic_block
2486 split_edge_bb_loc (edge edge_in)
2488 basic_block dest = edge_in->dest;
2489 basic_block dest_prev = dest->prev_bb;
2491 if (dest_prev)
2493 edge e = find_edge (dest_prev, dest);
2494 if (e && !(e->flags & EDGE_COMPLEX))
2495 return edge_in->src;
2497 return dest_prev;
2500 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2501 Abort on abnormal edges. */
2503 static basic_block
2504 gimple_split_edge (edge edge_in)
2506 basic_block new_bb, after_bb, dest;
2507 edge new_edge, e;
2509 /* Abnormal edges cannot be split. */
2510 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2512 dest = edge_in->dest;
2514 after_bb = split_edge_bb_loc (edge_in);
2516 new_bb = create_empty_bb (after_bb);
2517 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2518 new_bb->count = edge_in->count;
2519 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2520 new_edge->probability = REG_BR_PROB_BASE;
2521 new_edge->count = edge_in->count;
2523 e = redirect_edge_and_branch (edge_in, new_bb);
2524 gcc_assert (e == edge_in);
2525 reinstall_phi_args (new_edge, e);
2527 return new_bb;
/* Verify properties of the address expression T with base object BASE.  */

static tree
verify_address (tree t, tree base)
{
  bool old_constant;
  bool old_side_effects;
  bool new_constant;
  bool new_side_effects;

  /* Record the flags as currently set on T, recompute them, and
     diagnose any mismatch -- it means the ADDR_EXPR was modified
     without its invariant/side-effect flags being updated.  */
  old_constant = TREE_CONSTANT (t);
  old_side_effects = TREE_SIDE_EFFECTS (t);

  recompute_tree_invariant_for_addr_expr (t);
  new_side_effects = TREE_SIDE_EFFECTS (t);
  new_constant = TREE_CONSTANT (t);

  if (old_constant != new_constant)
    {
      error ("constant not recomputed when ADDR_EXPR changed");
      return t;
    }
  if (old_side_effects != new_side_effects)
    {
      error ("side effects not recomputed when ADDR_EXPR changed");
      return t;
    }

  /* Only plain decls get their address-taken state checked here;
     other bases have nothing further to verify.  */
  if (!(TREE_CODE (base) == VAR_DECL
	|| TREE_CODE (base) == PARM_DECL
	|| TREE_CODE (base) == RESULT_DECL))
    return NULL_TREE;

  /* A variable promoted to a gimple register must not have its
     address taken.  */
  if (DECL_GIMPLE_REG_P (base))
    {
      error ("DECL_GIMPLE_REG_P set on a variable with address taken");
      return base;
    }

  return NULL_TREE;
}
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.  Returns the offending subtree on error, NULL_TREE
   when the expression is valid.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Types themselves need no checking; do not recurse into them.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Released SSA names must not appear in the IL anymore.  */
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* All indirect loads/stores were lowered to MEM_REF.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      /* Operand 0 must be a valid gimple memory-reference address,
	 operand 1 a pointer-typed constant offset.  */
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      /* An assertion that folds to false can never hold.  */
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      /* Assignments are represented as GIMPLE_ASSIGN tuples now.  */
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	/* Only plain decls carry an ADDRESSABLE bit to verify.  */
	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      /* Conditions must be integral-typed gimple condexprs.  */
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
      /* Stripped during gimplification; must never survive.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      /* Position and size must be non-negative host integers,
		 and the size must agree with the result type.  */
	      if (!host_integerp (TREE_OPERAND (t, 1), 1)
		  || !host_integerp (TREE_OPERAND (t, 2), 1))
		{
		  error ("invalid position or size operand to BIT_FIELD_REF");
		  return t;
		}
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
		       && (TYPE_PRECISION (TREE_TYPE (t))
			   != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("integral result type precision does not match "
			 "field size of BIT_FIELD_REF");
		  return t;
		}
	      if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		  && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		      != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("mode precision of non-integral result does not "
			 "match field size of BIT_FIELD_REF");
		  return t;
		}
	    }

	  t = TREE_OPERAND (t, 0);
	}

      /* The innermost base must be an invariant or an lvalue.  */
      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is an integer with type of
	 sizetype.  */
      if (!useless_type_conversion_p (sizetype,
				      TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer with type of sizetype");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors need no element-wise checking.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      /* Case labels in the IL must have their chain cleared.  */
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
2845 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2846 Returns true if there is an error, otherwise false. */
2848 static bool
2849 verify_types_in_gimple_min_lval (tree expr)
2851 tree op;
2853 if (is_gimple_id (expr))
2854 return false;
2856 if (TREE_CODE (expr) != TARGET_MEM_REF
2857 && TREE_CODE (expr) != MEM_REF)
2859 error ("invalid expression for min lvalue");
2860 return true;
2863 /* TARGET_MEM_REFs are strange beasts. */
2864 if (TREE_CODE (expr) == TARGET_MEM_REF)
2865 return false;
2867 op = TREE_OPERAND (expr, 0);
2868 if (!is_gimple_val (op))
2870 error ("invalid operand in indirect reference");
2871 debug_generic_stmt (op);
2872 return true;
2874 /* Memory references now generally can involve a value conversion. */
2876 return false;
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the handled components off EXPR from outermost to innermost,
     checking each level against the type of its operand.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, and lower bound / stride when present, must all be
	     gimple values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* REALPART/IMAGPART must extract the component type of a
	 complex-typed operand.  */
      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* COMPONENT_REF must have the type of its FIELD_DECL.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* The innermost reference, if a memory reference, must have valid
     address and offset operands.  */
  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  /* A bare invariant is acceptable as an rvalue; otherwise the base
     must be a minimal lvalue.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3013 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3014 list of pointer-to types that is trivially convertible to DEST. */
3016 static bool
3017 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3019 tree src;
3021 if (!TYPE_POINTER_TO (src_obj))
3022 return true;
3024 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3025 if (useless_type_conversion_p (dest, src))
3026 return true;
3028 return false;
3031 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3032 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3034 static bool
3035 valid_fixed_convert_types_p (tree type1, tree type2)
3037 return (FIXED_POINT_TYPE_P (type1)
3038 && (INTEGRAL_TYPE_P (type2)
3039 || SCALAR_FLOAT_TYPE_P (type2)
3040 || FIXED_POINT_TYPE_P (type2)));
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gimple stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls carry their function code in the tuple and must
     have no target expression; regular calls must have one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee must be a pointer to a function or method type.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* DECL_LOOPING_CONST_OR_PURE_P only makes sense on functions that
     are const or pure in the first place.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  /* The LHS, when present, must be a valid lvalue reference.  */
  if (gimple_call_lhs (stmt)
      && (!is_gimple_lvalue (gimple_call_lhs (stmt))
	  || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  /* A call that never returns cannot meaningfully produce a value.  */
  if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
    {
      error ("LHS in noreturn call");
      return true;
    }

  /* The LHS type must be compatible with the call's return type.  */
  fntype = gimple_call_fntype (stmt);
  if (fntype
      && gimple_call_lhs (stmt)
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
				     TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, this should not be an indirect
     call, and the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt))
    {
      if (!gimple_call_fndecl (stmt))
	{
	  error ("static chain in indirect gimple call");
	  return true;
	}
      /* FN is an ADDR_EXPR of the callee here; strip to the decl.  */
      fn = TREE_OPERAND (fn, 0);

      if (!DECL_STATIC_CHAIN (fn))
	{
	  error ("static chain with function that doesn%'t use one");
	  return true;
	}
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Register-type arguments must be gimple values; aggregate
	 arguments must be lvalues.  */
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
/* Verifies the gimple comparison with the result type TYPE and
   the operands OP0 and OP1.  Returns true on error.  */

static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  /* Both operands must be gimple values (registers or invariants).  */
  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     The resulting type of a comparison may be any integral type.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if ((!useless_type_conversion_p (op0_type, op1_type)
       && !useless_type_conversion_p (op1_type, op0_type)
       && (!POINTER_TYPE_P (op0_type)
	   || !POINTER_TYPE_P (op1_type)
	   || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
      || !INTEGRAL_TYPE_P (type))
    {
      error ("type mismatch in comparison expression");
      debug_generic_expr (type);
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  return false;
}
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_unary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  /* Unary operations always produce a register value.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions between integral types and pointers only if
	   there is no sign or zero extension involved.
	   For targets were the precision of sizetype doesn't match that
	   of pointers we need to allow arbitrary conversions from and
	   to sizetype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type)
	     && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
		 || rhs1_type == sizetype))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || lhs_type == sizetype)))
	  return false;

	/* Allow conversion from integer to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && TREE_CODE (rhs1_type) == INTEGER_TYPE)
	    || (TREE_CODE (lhs_type) == INTEGER_TYPE
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      {
	/* Both sides must be pointers, and into different address
	   spaces -- otherwise a NOP conversion should be used.  */
	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
	  {
	    error ("invalid types in address space conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* One side must be fixed-point, the other fixed, integral or
	   float, in either direction.  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer to float conversion.  */
	if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Float to integer conversion.  */
	if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case TRUTH_NOT_EXPR:
      /* We require two-valued operand types.  */
      if (!(TREE_CODE (rhs1_type) == BOOLEAN_TYPE
	    || (INTEGRAL_TYPE_P (rhs1_type)
		&& TYPE_PRECISION (rhs1_type) == 1)))
	{
	  error ("invalid types in truth not");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}
      break;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case NON_LVALUE_EXPR:
    case CONJ_EXPR:
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_binary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  /* Binary operations always produce a register value.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	/* Build a complex value from two scalars.  */
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
		 || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
		 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in complex expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	/* Whole-vector shifts: the first operand is a vector, the
	   shift amount an integer or integer vector.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
		 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
		 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		&& (TREE_CODE (rhs2_type) != VECTOR_TYPE
		    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	/* For shifting a vector of non-integral components we
	   only allow shifting by a constant multiple of the element size.  */
	if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    && (TREE_CODE (rhs2) != INTEGER_CST
		|| !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
					   TYPE_SIZE (TREE_TYPE (rhs1_type)))))
	  {
	    error ("non-element sized vector shift of floating point vector");
	    return true;
	  }

	return false;
      }

    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	/* We use regular PLUS_EXPR and MINUS_EXPR for vectors.
	   ??? This just makes the checker happy and may not be what is
	   intended.  */
	if (TREE_CODE (lhs_type) == VECTOR_TYPE
	    && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to vector valued plus");
		return true;
	      }
	    /* Check the element types as a pointer-plus.  */
	    lhs_type = TREE_TYPE (lhs_type);
	    rhs1_type = TREE_TYPE (rhs1_type);
	    rhs2_type = TREE_TYPE (rhs2_type);
	    /* PLUS_EXPR is commutative, so we might end up canonicalizing
	       the pointer to 2nd place.  */
	    if (POINTER_TYPE_P (rhs2_type))
	      {
		tree tem = rhs1_type;
		rhs1_type = rhs2_type;
		rhs2_type = tem;
	      }
	    goto do_pointer_plus_expr_check;
	  }
	if (POINTER_TYPE_P (lhs_type)
	    || POINTER_TYPE_P (rhs1_type)
	    || POINTER_TYPE_P (rhs2_type))
	  {
	    error ("invalid (pointer) operands to plus/minus");
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
do_pointer_plus_expr_check:
	/* Pointer offsetting: LHS and RHS1 share the pointer type,
	   the offset must be sizetype-compatible.  */
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !useless_type_conversion_p (sizetype, rhs2_type))
	  {
	    error ("type mismatch in pointer plus expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      /* Truth operations were lowered out of GIMPLE assignments.  */

      gcc_unreachable ();

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2);

    case WIDEN_MULT_EXPR:
      /* The result must be exactly twice as wide as the (equally
	 wide) operands.  */
      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
	return true;
      return ((2 * TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (lhs_type))
	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));

    case WIDEN_SUM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      /* FIXME.  */
      return false;

    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* Generic case: all three types must be trivially compatible.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
3632 /* Verify a gimple assignment statement STMT with a ternary rhs.
3633 Returns true if anything is wrong. */
3635 static bool
3636 verify_gimple_assign_ternary (gimple stmt)
3638 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3639 tree lhs = gimple_assign_lhs (stmt);
3640 tree lhs_type = TREE_TYPE (lhs);
3641 tree rhs1 = gimple_assign_rhs1 (stmt);
3642 tree rhs1_type = TREE_TYPE (rhs1);
3643 tree rhs2 = gimple_assign_rhs2 (stmt);
3644 tree rhs2_type = TREE_TYPE (rhs2);
3645 tree rhs3 = gimple_assign_rhs3 (stmt);
3646 tree rhs3_type = TREE_TYPE (rhs3);
/* The result of a ternary operation must go into a register.  */
3648 if (!is_gimple_reg (lhs))
3650 error ("non-register as LHS of ternary operation");
3651 return true;
/* All three operands must already be flattened gimple values.  */
3654 if (!is_gimple_val (rhs1)
3655 || !is_gimple_val (rhs2)
3656 || !is_gimple_val (rhs3))
3658 error ("invalid operands in ternary operation");
3659 return true;
3662 /* First handle operations that involve different types. */
3663 switch (rhs_code)
/* Widening multiply-accumulate: the multiplied operands rhs1/rhs2 must
   share an integral or fixed-point type of exactly half the result's
   precision, and the accumulator rhs3 must match the result type.  */
3665 case WIDEN_MULT_PLUS_EXPR:
3666 case WIDEN_MULT_MINUS_EXPR:
3667 if ((!INTEGRAL_TYPE_P (rhs1_type)
3668 && !FIXED_POINT_TYPE_P (rhs1_type))
3669 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3670 || !useless_type_conversion_p (lhs_type, rhs3_type)
3671 || 2 * TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (lhs_type)
3672 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3674 error ("type mismatch in widening multiply-accumulate expression");
3675 debug_generic_expr (lhs_type);
3676 debug_generic_expr (rhs1_type);
3677 debug_generic_expr (rhs2_type);
3678 debug_generic_expr (rhs3_type);
3679 return true;
3681 break;
/* Fused multiply-add: all three operand types must agree with the
   result type.  */
3683 case FMA_EXPR:
3684 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3685 || !useless_type_conversion_p (lhs_type, rhs2_type)
3686 || !useless_type_conversion_p (lhs_type, rhs3_type))
3688 error ("type mismatch in fused multiply-add expression");
3689 debug_generic_expr (lhs_type);
3690 debug_generic_expr (rhs1_type);
3691 debug_generic_expr (rhs2_type);
3692 debug_generic_expr (rhs3_type);
3693 return true;
3695 break;
/* Not verified yet; accept any operand types.  */
3697 case DOT_PROD_EXPR:
3698 case REALIGN_LOAD_EXPR:
3699 /* FIXME. */
3700 return false;
3702 default:
3703 gcc_unreachable ();
3705 return false;
3708 /* Verify a gimple assignment statement STMT with a single rhs.
3709 Returns true if anything is wrong. */
3711 static bool
3712 verify_gimple_assign_single (gimple stmt)
3714 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3715 tree lhs = gimple_assign_lhs (stmt);
3716 tree lhs_type = TREE_TYPE (lhs);
3717 tree rhs1 = gimple_assign_rhs1 (stmt);
3718 tree rhs1_type = TREE_TYPE (rhs1);
3719 bool res = false;
/* A plain copy must not require any conversion.  */
3721 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3723 error ("non-trivial conversion at assignment");
3724 debug_generic_expr (lhs_type);
3725 debug_generic_expr (rhs1_type);
3726 return true;
/* If the LHS is a memory reference chain, validate it as a store
   destination and accumulate the result into RES.  */
3729 if (handled_component_p (lhs))
3730 res |= verify_types_in_gimple_reference (lhs, true);
3732 /* Special codes we cannot handle via their class. */
3733 switch (rhs_code)
3735 case ADDR_EXPR:
3737 tree op = TREE_OPERAND (rhs1, 0);
3738 if (!is_gimple_addressable (op))
3740 error ("invalid operand in unary expression");
3741 return true;
3744 /* Technically there is no longer a need for matching types, but
3745 gimple hygiene asks for this check. In LTO we can end up
3746 combining incompatible units and thus end up with addresses
3747 of globals that change their type to a common one. */
3748 if (!in_lto_p
3749 && !types_compatible_p (TREE_TYPE (op),
3750 TREE_TYPE (TREE_TYPE (rhs1)))
3751 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3752 TREE_TYPE (op)))
3754 error ("type mismatch in address expression");
3755 debug_generic_stmt (TREE_TYPE (rhs1));
3756 debug_generic_stmt (TREE_TYPE (op));
3757 return true;
3760 return verify_types_in_gimple_reference (op, true);
3763 /* tcc_reference */
/* INDIRECT_REF was replaced by MEM_REF and must not survive into
   gimple.  */
3764 case INDIRECT_REF:
3765 error ("INDIRECT_REF in gimple IL");
3766 return true;
3768 case COMPONENT_REF:
3769 case BIT_FIELD_REF:
3770 case ARRAY_REF:
3771 case ARRAY_RANGE_REF:
3772 case VIEW_CONVERT_EXPR:
3773 case REALPART_EXPR:
3774 case IMAGPART_EXPR:
3775 case TARGET_MEM_REF:
3776 case MEM_REF:
/* A memory store of a register-typed value must have a register
   RHS, not another memory reference.  */
3777 if (!is_gimple_reg (lhs)
3778 && is_gimple_reg_type (TREE_TYPE (lhs)))
3780 error ("invalid rhs for gimple memory store");
3781 debug_generic_stmt (lhs);
3782 debug_generic_stmt (rhs1);
3783 return true;
3785 return res || verify_types_in_gimple_reference (rhs1, false);
3787 /* tcc_constant */
3788 case SSA_NAME:
3789 case INTEGER_CST:
3790 case REAL_CST:
3791 case FIXED_CST:
3792 case COMPLEX_CST:
3793 case VECTOR_CST:
3794 case STRING_CST:
3795 return res;
3797 /* tcc_declaration */
3798 case CONST_DECL:
3799 return res;
3800 case VAR_DECL:
3801 case PARM_DECL:
/* Memory-to-memory copies of register-typed values are invalid;
   one side must be a register.  */
3802 if (!is_gimple_reg (lhs)
3803 && !is_gimple_reg (rhs1)
3804 && is_gimple_reg_type (TREE_TYPE (lhs)))
3806 error ("invalid rhs for gimple memory store");
3807 debug_generic_stmt (lhs);
3808 debug_generic_stmt (rhs1);
3809 return true;
3811 return res;
/* COND_EXPR on the RHS: the condition may be a register or a bare
   comparison; both arms must be registers or invariants.  */
3813 case COND_EXPR:
3814 if (!is_gimple_reg (lhs)
3815 || (!is_gimple_reg (TREE_OPERAND (rhs1, 0))
3816 && !COMPARISON_CLASS_P (TREE_OPERAND (rhs1, 0)))
3817 || (!is_gimple_reg (TREE_OPERAND (rhs1, 1))
3818 && !is_gimple_min_invariant (TREE_OPERAND (rhs1, 1)))
3819 || (!is_gimple_reg (TREE_OPERAND (rhs1, 2))
3820 && !is_gimple_min_invariant (TREE_OPERAND (rhs1, 2))))
3822 error ("invalid COND_EXPR in gimple assignment");
3823 debug_generic_stmt (rhs1);
3824 return true;
3826 return res;
3828 case CONSTRUCTOR:
3829 case OBJ_TYPE_REF:
3830 case ASSERT_EXPR:
3831 case WITH_SIZE_EXPR:
3832 case VEC_COND_EXPR:
3833 /* FIXME. */
3834 return res;
3836 default:;
3839 return res;
3842 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3843 is a problem, otherwise false. */
3845 static bool
3846 verify_gimple_assign (gimple stmt)
3848 switch (gimple_assign_rhs_class (stmt))
3850 case GIMPLE_SINGLE_RHS:
3851 return verify_gimple_assign_single (stmt);
3853 case GIMPLE_UNARY_RHS:
3854 return verify_gimple_assign_unary (stmt);
3856 case GIMPLE_BINARY_RHS:
3857 return verify_gimple_assign_binary (stmt);
3859 case GIMPLE_TERNARY_RHS:
3860 return verify_gimple_assign_ternary (stmt);
3862 default:
3863 gcc_unreachable ();
3867 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3868 is a problem, otherwise false. */
3870 static bool
3871 verify_gimple_return (gimple stmt)
3873 tree op = gimple_return_retval (stmt);
/* The declared return type of the current function.  */
3874 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3876 /* We cannot test for present return values as we do not fix up missing
3877 return values from the original source. */
3878 if (op == NULL)
3879 return false;
3881 if (!is_gimple_val (op)
3882 && TREE_CODE (op) != RESULT_DECL)
3884 error ("invalid operand in return statement");
3885 debug_generic_stmt (op);
3886 return true;
/* When the result is returned by invisible reference, the RESULT_DECL
   (or the SSA name based on it) is a pointer; substitute its type so
   the conversion check below compares against the pointed-to type.  */
3889 if ((TREE_CODE (op) == RESULT_DECL
3890 && DECL_BY_REFERENCE (op))
3891 || (TREE_CODE (op) == SSA_NAME
3892 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
3893 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
3894 op = TREE_TYPE (op);
3896 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
3898 error ("invalid conversion in return statement");
3899 debug_generic_stmt (restype);
3900 debug_generic_stmt (TREE_TYPE (op));
3901 return true;
3904 return false;
3908 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3909 is a problem, otherwise false. */
3911 static bool
3912 verify_gimple_goto (gimple stmt)
3914 tree dest = gimple_goto_dest (stmt);
3916 /* ??? We have two canonical forms of direct goto destinations, a
3917 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
3918 if (TREE_CODE (dest) != LABEL_DECL
3919 && (!is_gimple_val (dest)
3920 || !POINTER_TYPE_P (TREE_TYPE (dest))))
3922 error ("goto destination is neither a label nor a pointer");
3923 return true;
3926 return false;
3929 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
3930 is a problem, otherwise false. */
3932 static bool
3933 verify_gimple_switch (gimple stmt)
3935 if (!is_gimple_val (gimple_switch_index (stmt)))
3937 error ("invalid operand to switch statement");
3938 debug_generic_stmt (gimple_switch_index (stmt));
3939 return true;
3942 return false;
3946 /* Verify a gimple debug statement STMT.
3947 Returns true if anything is wrong. */
3949 static bool
3950 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
3952 /* There isn't much that could be wrong in a gimple debug stmt. A
3953 gimple debug bind stmt, for example, maps a tree, that's usually
3954 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
3955 component or member of an aggregate type, to another tree, that
3956 can be an arbitrary expression. These stmts expand into debug
3957 insns, and are converted to debug notes by var-tracking.c. */
3958 return false;
3961 /* Verify a gimple label statement STMT.
3962 Returns true if anything is wrong. */
3964 static bool
3965 verify_gimple_label (gimple stmt)
3967 tree decl = gimple_label_label (stmt);
3968 int uid;
3969 bool err = false;
3971 if (TREE_CODE (decl) != LABEL_DECL)
3972 return true;
/* Each label must map back to the basic block that contains its
   GIMPLE_LABEL statement.  */
3974 uid = LABEL_DECL_UID (decl);
3975 if (cfun->cfg
3976 && (uid == -1
3977 || VEC_index (basic_block,
3978 label_to_block_map, uid) != gimple_bb (stmt)))
3980 error ("incorrect entry in label_to_block_map");
3981 err |= true;
/* A label carrying an EH landing-pad number must be the post-landing-pad
   label recorded for that pad.  */
3984 uid = EH_LANDING_PAD_NR (decl);
3985 if (uid)
3987 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
3988 if (decl != lp->post_landing_pad)
3990 error ("incorrect setting of landing pad number");
3991 err |= true;
3995 return err;
3998 /* Verify the GIMPLE statement STMT. Returns true if there is an
3999 error, otherwise false. */
4001 static bool
4002 verify_gimple_stmt (gimple stmt)
/* Dispatch to the code-specific verifier.  */
4004 switch (gimple_code (stmt))
4006 case GIMPLE_ASSIGN:
4007 return verify_gimple_assign (stmt);
4009 case GIMPLE_LABEL:
4010 return verify_gimple_label (stmt);
4012 case GIMPLE_CALL:
4013 return verify_gimple_call (stmt);
/* A GIMPLE_COND must use a comparison code, its labels (when present)
   must be LABEL_DECLs, and its operands are checked like a comparison
   producing a boolean.  */
4015 case GIMPLE_COND:
4016 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4018 error ("invalid comparison code in gimple cond");
4019 return true;
4021 if (!(!gimple_cond_true_label (stmt)
4022 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4023 || !(!gimple_cond_false_label (stmt)
4024 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4026 error ("invalid labels in gimple cond");
4027 return true;
4030 return verify_gimple_comparison (boolean_type_node,
4031 gimple_cond_lhs (stmt),
4032 gimple_cond_rhs (stmt));
4034 case GIMPLE_GOTO:
4035 return verify_gimple_goto (stmt);
4037 case GIMPLE_SWITCH:
4038 return verify_gimple_switch (stmt);
4040 case GIMPLE_RETURN:
4041 return verify_gimple_return (stmt);
/* Inline asm operands are not verified here.  */
4043 case GIMPLE_ASM:
4044 return false;
4046 /* Tuples that do not have tree operands. */
4047 case GIMPLE_NOP:
4048 case GIMPLE_PREDICT:
4049 case GIMPLE_RESX:
4050 case GIMPLE_EH_DISPATCH:
4051 case GIMPLE_EH_MUST_NOT_THROW:
4052 return false;
4054 CASE_GIMPLE_OMP:
4055 /* OpenMP directives are validated by the FE and never operated
4056 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4057 non-gimple expressions when the main index variable has had
4058 its address taken. This does not affect the loop itself
4059 because the header of an GIMPLE_OMP_FOR is merely used to determine
4060 how to setup the parallel iteration. */
4061 return false;
4063 case GIMPLE_DEBUG:
4064 return verify_gimple_debug (stmt);
4066 default:
4067 gcc_unreachable ();
4071 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4072 and false otherwise. */
4074 static bool
4075 verify_gimple_phi (gimple phi)
4077 bool err = false;
4078 unsigned i;
4079 tree phi_result = gimple_phi_result (phi);
4080 bool virtual_p;
4082 if (!phi_result)
4084 error ("invalid PHI result");
4085 return true;
/* A PHI is "virtual" when its result is not a gimple register; such a
   PHI must be for the single virtual operand of the function.  */
4088 virtual_p = !is_gimple_reg (phi_result);
4089 if (TREE_CODE (phi_result) != SSA_NAME
4090 || (virtual_p
4091 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4093 error ("invalid PHI result");
4094 err = true;
/* Each argument must exist and agree with the PHI's virtualness.  */
4097 for (i = 0; i < gimple_phi_num_args (phi); i++)
4099 tree t = gimple_phi_arg_def (phi, i);
4101 if (!t)
4103 error ("missing PHI def");
4104 err |= true;
4105 continue;
4107 /* Addressable variables do have SSA_NAMEs but they
4108 are not considered gimple values. */
4109 else if ((TREE_CODE (t) == SSA_NAME
4110 && virtual_p != !is_gimple_reg (t))
4111 || (virtual_p
4112 && (TREE_CODE (t) != SSA_NAME
4113 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4114 || (!virtual_p
4115 && !is_gimple_val (t)))
4117 error ("invalid PHI argument");
4118 debug_generic_expr (t);
4119 err |= true;
4121 #ifdef ENABLE_TYPES_CHECKING
/* Optional, more expensive check: every argument type must be
   trivially convertible to the result type.  */
4122 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4124 error ("incompatible types in PHI argument %u", i);
4125 debug_generic_stmt (TREE_TYPE (phi_result));
4126 debug_generic_stmt (TREE_TYPE (t));
4127 err |= true;
4129 #endif
4132 return err;
4135 /* Verify the GIMPLE statements inside the sequence STMTS. */
4137 static bool
4138 verify_gimple_in_seq_2 (gimple_seq stmts)
4140 gimple_stmt_iterator ittr;
4141 bool err = false;
/* Walk the sequence, recursing into the bodies of compound statements
   and verifying every leaf statement.  */
4143 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4145 gimple stmt = gsi_stmt (ittr);
4147 switch (gimple_code (stmt))
4149 case GIMPLE_BIND:
4150 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4151 break;
4153 case GIMPLE_TRY:
4154 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4155 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4156 break;
4158 case GIMPLE_EH_FILTER:
4159 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4160 break;
4162 case GIMPLE_CATCH:
4163 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4164 break;
4166 default:
/* Dump any statement that fails verification to ease debugging.  */
4168 bool err2 = verify_gimple_stmt (stmt);
4169 if (err2)
4170 debug_gimple_stmt (stmt);
4171 err |= err2;
4176 return err;
4180 /* Verify the GIMPLE statements inside the statement list STMTS. */
4182 DEBUG_FUNCTION void
4183 verify_gimple_in_seq (gimple_seq stmts)
4185 timevar_push (TV_TREE_STMT_VERIFY);
4186 if (verify_gimple_in_seq_2 (stmts))
4187 internal_error ("verify_gimple failed");
4188 timevar_pop (TV_TREE_STMT_VERIFY);
4191 /* Return true when the T can be shared. */
4193 bool
4194 tree_node_can_be_shared (tree t)
4196 if (IS_TYPE_OR_DECL_P (t)
4197 || is_gimple_min_invariant (t)
4198 || TREE_CODE (t) == SSA_NAME
4199 || t == error_mark_node
4200 || TREE_CODE (t) == IDENTIFIER_NODE)
4201 return true;
4203 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4204 return true;
4206 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4207 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4208 || TREE_CODE (t) == COMPONENT_REF
4209 || TREE_CODE (t) == REALPART_EXPR
4210 || TREE_CODE (t) == IMAGPART_EXPR)
4211 t = TREE_OPERAND (t, 0);
4213 if (DECL_P (t))
4214 return true;
4216 return false;
4219 /* Called via walk_gimple_stmt. Verify tree sharing. */
4221 static tree
4222 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4224 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4225 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4227 if (tree_node_can_be_shared (*tp))
4229 *walk_subtrees = false;
4230 return NULL;
4233 if (pointer_set_insert (visited, *tp))
4234 return *tp;
4236 return NULL;
/* Set when verify_eh_throw_stmt_node finds a statement recorded in the
   EH table that no longer exists in the CFG.  */
4239 static bool eh_error_found;
/* htab_traverse callback: check that the statement in *SLOT (an EH table
   entry) was seen during the CFG walk whose visited-set is DATA.
   Always returns 1 so the traversal continues over all entries.  */
4240 static int
4241 verify_eh_throw_stmt_node (void **slot, void *data)
4243 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4244 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4246 if (!pointer_set_contains (visited, node->stmt))
4248 error ("dead STMT in EH table");
4249 debug_gimple_stmt (node->stmt);
4250 eh_error_found = true;
4252 return 1;
4255 /* Verify the GIMPLE statements in the CFG of FN. */
4257 DEBUG_FUNCTION void
4258 verify_gimple_in_cfg (struct function *fn)
4260 basic_block bb;
4261 bool err = false;
/* VISITED tracks tree nodes for the sharing check; VISITED_STMTS
   records every statement seen, for the EH-table cross-check below.  */
4262 struct pointer_set_t *visited, *visited_stmts;
4264 timevar_push (TV_TREE_STMT_VERIFY);
4265 visited = pointer_set_create ();
4266 visited_stmts = pointer_set_create ();
4268 FOR_EACH_BB_FN (bb, fn)
4270 gimple_stmt_iterator gsi;
/* First pass over the block: verify the PHI nodes.  */
4272 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4274 gimple phi = gsi_stmt (gsi);
4275 bool err2 = false;
4276 unsigned i;
4278 pointer_set_insert (visited_stmts, phi);
4280 if (gimple_bb (phi) != bb)
4282 error ("gimple_bb (phi) is set to a wrong basic block");
4283 err2 = true;
4286 err2 |= verify_gimple_phi (phi);
/* Check tree sharing within the PHI arguments.  */
4288 for (i = 0; i < gimple_phi_num_args (phi); i++)
4290 tree arg = gimple_phi_arg_def (phi, i);
4291 tree addr = walk_tree (&arg, verify_node_sharing, visited, NULL);
4292 if (addr)
4294 error ("incorrect sharing of tree nodes");
4295 debug_generic_expr (addr);
4296 err2 |= true;
4300 if (err2)
4301 debug_gimple_stmt (phi);
4302 err |= err2;
/* Second pass: verify the ordinary statements.  */
4305 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4307 gimple stmt = gsi_stmt (gsi);
4308 bool err2 = false;
4309 struct walk_stmt_info wi;
4310 tree addr;
4311 int lp_nr;
4313 pointer_set_insert (visited_stmts, stmt);
4315 if (gimple_bb (stmt) != bb)
4317 error ("gimple_bb (stmt) is set to a wrong basic block");
4318 err2 = true;
4321 err2 |= verify_gimple_stmt (stmt);
4323 memset (&wi, 0, sizeof (wi));
4324 wi.info = (void *) visited;
4325 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4326 if (addr)
4328 error ("incorrect sharing of tree nodes");
4329 debug_generic_expr (addr);
4330 err2 |= true;
4333 /* ??? Instead of not checking these stmts at all the walker
4334 should know its context via wi. */
4335 if (!is_gimple_debug (stmt)
4336 && !is_gimple_omp (stmt))
4338 memset (&wi, 0, sizeof (wi));
4339 addr = walk_gimple_op (stmt, verify_expr, &wi);
4340 if (addr)
4342 debug_generic_expr (addr);
4343 inform (gimple_location (stmt), "in statement");
4344 err2 |= true;
4348 /* If the statement is marked as part of an EH region, then it is
4349 expected that the statement could throw. Verify that when we
4350 have optimizations that simplify statements such that we prove
4351 that they cannot throw, that we update other data structures
4352 to match. */
4353 lp_nr = lookup_stmt_eh_lp (stmt);
4354 if (lp_nr != 0)
4356 if (!stmt_could_throw_p (stmt))
4358 error ("statement marked for throw, but doesn%'t");
4359 err2 |= true;
4361 else if (lp_nr > 0
4362 && !gsi_one_before_end_p (gsi)
4363 && stmt_can_throw_internal (stmt))
4365 error ("statement marked for throw in middle of block");
4366 err2 |= true;
4370 if (err2)
4371 debug_gimple_stmt (stmt);
4372 err |= err2;
/* Finally cross-check the EH throw table against the statements we
   actually visited.  */
4376 eh_error_found = false;
4377 if (get_eh_throw_stmt_table (cfun))
4378 htab_traverse (get_eh_throw_stmt_table (cfun),
4379 verify_eh_throw_stmt_node,
4380 visited_stmts);
4382 if (err || eh_error_found)
4383 internal_error ("verify_gimple failed");
4385 pointer_set_destroy (visited);
4386 pointer_set_destroy (visited_stmts);
4387 verify_histograms ();
4388 timevar_pop (TV_TREE_STMT_VERIFY);
4392 /* Verifies that the flow information is OK. */
4394 static int
4395 gimple_verify_flow_info (void)
4397 int err = 0;
4398 basic_block bb;
4399 gimple_stmt_iterator gsi;
4400 gimple stmt;
4401 edge e;
4402 edge_iterator ei;
/* The artificial entry and exit blocks never carry statements.  */
4404 if (ENTRY_BLOCK_PTR->il.gimple)
4406 error ("ENTRY_BLOCK has IL associated with it");
4407 err = 1;
4410 if (EXIT_BLOCK_PTR->il.gimple)
4412 error ("EXIT_BLOCK has IL associated with it");
4413 err = 1;
4416 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4417 if (e->flags & EDGE_FALLTHRU)
4419 error ("fallthru to exit from bb %d", e->src->index);
4420 err = 1;
4423 FOR_EACH_BB (bb)
4425 bool found_ctrl_stmt = false;
4427 stmt = NULL;
4429 /* Skip labels on the start of basic block. */
4430 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4432 tree label;
4433 gimple prev_stmt = stmt;
4435 stmt = gsi_stmt (gsi);
4437 if (gimple_code (stmt) != GIMPLE_LABEL)
4438 break;
/* Nonlocal and EH landing-pad labels must lead their block's label
   sequence; every label must map to this block and belong to the
   current function.  */
4440 label = gimple_label_label (stmt);
4441 if (prev_stmt && DECL_NONLOCAL (label))
4443 error ("nonlocal label ");
4444 print_generic_expr (stderr, label, 0);
4445 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4446 bb->index);
4447 err = 1;
4450 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4452 error ("EH landing pad label ");
4453 print_generic_expr (stderr, label, 0);
4454 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4455 bb->index);
4456 err = 1;
4459 if (label_to_block (label) != bb)
4461 error ("label ");
4462 print_generic_expr (stderr, label, 0);
4463 fprintf (stderr, " to block does not match in bb %d",
4464 bb->index);
4465 err = 1;
4468 if (decl_function_context (label) != current_function_decl)
4470 error ("label ");
4471 print_generic_expr (stderr, label, 0);
4472 fprintf (stderr, " has incorrect context in bb %d",
4473 bb->index);
4474 err = 1;
4478 /* Verify that body of basic block BB is free of control flow. */
4479 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4481 gimple stmt = gsi_stmt (gsi);
4483 if (found_ctrl_stmt)
4485 error ("control flow in the middle of basic block %d",
4486 bb->index);
4487 err = 1;
4490 if (stmt_ends_bb_p (stmt))
4491 found_ctrl_stmt = true;
4493 if (gimple_code (stmt) == GIMPLE_LABEL)
4495 error ("label ");
4496 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4497 fprintf (stderr, " in the middle of basic block %d", bb->index);
4498 err = 1;
/* Now check the outgoing edges against the block's final statement.  */
4502 gsi = gsi_last_bb (bb);
4503 if (gsi_end_p (gsi))
4504 continue;
4506 stmt = gsi_stmt (gsi);
4508 if (gimple_code (stmt) == GIMPLE_LABEL)
4509 continue;
4511 err |= verify_eh_edges (stmt);
4513 if (is_ctrl_stmt (stmt))
4515 FOR_EACH_EDGE (e, ei, bb->succs)
4516 if (e->flags & EDGE_FALLTHRU)
4518 error ("fallthru edge after a control statement in bb %d",
4519 bb->index);
4520 err = 1;
4524 if (gimple_code (stmt) != GIMPLE_COND)
4526 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4527 after anything else but if statement. */
4528 FOR_EACH_EDGE (e, ei, bb->succs)
4529 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4531 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4532 bb->index);
4533 err = 1;
4537 switch (gimple_code (stmt))
/* A conditional must have exactly one true and one false successor,
   neither fallthru nor abnormal.  */
4539 case GIMPLE_COND:
4541 edge true_edge;
4542 edge false_edge;
4544 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4546 if (!true_edge
4547 || !false_edge
4548 || !(true_edge->flags & EDGE_TRUE_VALUE)
4549 || !(false_edge->flags & EDGE_FALSE_VALUE)
4550 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4551 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4552 || EDGE_COUNT (bb->succs) >= 3)
4554 error ("wrong outgoing edge flags at end of bb %d",
4555 bb->index);
4556 err = 1;
4559 break;
/* Simple gotos are represented implicitly by edges; only computed
   gotos (with abnormal successors) may end a block.  */
4561 case GIMPLE_GOTO:
4562 if (simple_goto_p (stmt))
4564 error ("explicit goto at end of bb %d", bb->index);
4565 err = 1;
4567 else
4569 /* FIXME. We should double check that the labels in the
4570 destination blocks have their address taken. */
4571 FOR_EACH_EDGE (e, ei, bb->succs)
4572 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4573 | EDGE_FALSE_VALUE))
4574 || !(e->flags & EDGE_ABNORMAL))
4576 error ("wrong outgoing edge flags at end of bb %d",
4577 bb->index);
4578 err = 1;
4581 break;
4583 case GIMPLE_CALL:
4584 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4585 break;
4586 /* ... fallthru ... */
/* A return (or __builtin_return) must have a single plain edge to
   the exit block.  */
4587 case GIMPLE_RETURN:
4588 if (!single_succ_p (bb)
4589 || (single_succ_edge (bb)->flags
4590 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4591 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4593 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4594 err = 1;
4596 if (single_succ (bb) != EXIT_BLOCK_PTR)
4598 error ("return edge does not point to exit in bb %d",
4599 bb->index);
4600 err = 1;
4602 break;
/* For a switch: case labels must be sorted and the successor edges
   must match the destination blocks exactly, marked via bb->aux.  */
4604 case GIMPLE_SWITCH:
4606 tree prev;
4607 edge e;
4608 size_t i, n;
4610 n = gimple_switch_num_labels (stmt);
4612 /* Mark all the destination basic blocks. */
4613 for (i = 0; i < n; ++i)
4615 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4616 basic_block label_bb = label_to_block (lab);
4617 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4618 label_bb->aux = (void *)1;
4621 /* Verify that the case labels are sorted. */
4622 prev = gimple_switch_label (stmt, 0);
4623 for (i = 1; i < n; ++i)
4625 tree c = gimple_switch_label (stmt, i);
4626 if (!CASE_LOW (c))
4628 error ("found default case not at the start of "
4629 "case vector");
4630 err = 1;
4631 continue;
4633 if (CASE_LOW (prev)
4634 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4636 error ("case labels not sorted: ");
4637 print_generic_expr (stderr, prev, 0);
4638 fprintf (stderr," is greater than ");
4639 print_generic_expr (stderr, c, 0);
4640 fprintf (stderr," but comes before it.\n");
4641 err = 1;
4643 prev = c;
4645 /* VRP will remove the default case if it can prove it will
4646 never be executed. So do not verify there always exists
4647 a default case here. */
4649 FOR_EACH_EDGE (e, ei, bb->succs)
4651 if (!e->dest->aux)
4653 error ("extra outgoing edge %d->%d",
4654 bb->index, e->dest->index);
4655 err = 1;
4658 e->dest->aux = (void *)2;
4659 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4660 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4662 error ("wrong outgoing edge flags at end of bb %d",
4663 bb->index);
4664 err = 1;
4668 /* Check that we have all of them. */
4669 for (i = 0; i < n; ++i)
4671 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4672 basic_block label_bb = label_to_block (lab);
4674 if (label_bb->aux != (void *)2)
4676 error ("missing edge %i->%i", bb->index, label_bb->index);
4677 err = 1;
/* Reset the aux markers for the next block's check.  */
4681 FOR_EACH_EDGE (e, ei, bb->succs)
4682 e->dest->aux = (void *)0;
4684 break;
4686 case GIMPLE_EH_DISPATCH:
4687 err |= verify_eh_dispatch_edge (stmt);
4688 break;
4690 default:
4691 break;
4695 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4696 verify_dominators (CDI_DOMINATORS);
4698 return err;
4702 /* Updates phi nodes after creating a forwarder block joined
4703 by edge FALLTHRU. */
4705 static void
4706 gimple_make_forwarder_block (edge fallthru)
4708 edge e;
4709 edge_iterator ei;
4710 basic_block dummy, bb;
4711 tree var;
4712 gimple_stmt_iterator gsi;
/* DUMMY is the new forwarder block; BB is the original block that now
   receives the redirected predecessors.  */
4714 dummy = fallthru->src;
4715 bb = fallthru->dest;
/* With a single predecessor BB needs no PHI nodes of its own.  */
4717 if (single_pred_p (bb))
4718 return;
4720 /* If we redirected a branch we must create new PHI nodes at the
4721 start of BB. */
4722 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4724 gimple phi, new_phi;
4726 phi = gsi_stmt (gsi);
4727 var = gimple_phi_result (phi);
/* Move the old result to the new PHI in BB and give the PHI in the
   forwarder block a fresh SSA name, feeding it into the new PHI
   along FALLTHRU.  */
4728 new_phi = create_phi_node (var, bb);
4729 SSA_NAME_DEF_STMT (var) = new_phi;
4730 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4731 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
4732 UNKNOWN_LOCATION);
4735 /* Add the arguments we have stored on edges. */
4736 FOR_EACH_EDGE (e, ei, bb->preds)
4738 if (e == fallthru)
4739 continue;
4741 flush_pending_stmts (e);
4746 /* Return a non-special label in the head of basic block BLOCK.
4747 Create one if it doesn't exist. */
4749 tree
4750 gimple_block_label (basic_block bb)
4752 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4753 bool first = true;
4754 tree label;
4755 gimple stmt;
/* Scan the leading GIMPLE_LABELs for one that is not nonlocal; if it
   is not already first, move it to the head of the block.  */
4757 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4759 stmt = gsi_stmt (i);
4760 if (gimple_code (stmt) != GIMPLE_LABEL)
4761 break;
4762 label = gimple_label_label (stmt);
4763 if (!DECL_NONLOCAL (label))
4765 if (!first)
4766 gsi_move_before (&i, &s);
4767 return label;
/* No usable label found; create an artificial one at the block start.  */
4771 label = create_artificial_label (UNKNOWN_LOCATION);
4772 stmt = gimple_build_label (label);
4773 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4774 return label;
4778 /* Attempt to perform edge redirection by replacing a possibly complex
4779 jump instruction by a goto or by removing the jump completely.
4780 This can apply only if all edges now point to the same block. The
4781 parameters and return values are equivalent to
4782 redirect_edge_and_branch. */
4784 static edge
4785 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4787 basic_block src = e->src;
4788 gimple_stmt_iterator i;
4789 gimple stmt;
4791 /* We can replace or remove a complex jump only when we have exactly
4792 two edges. */
4793 if (EDGE_COUNT (src->succs) != 2
4794 /* Verify that all targets will be TARGET. Specifically, the
4795 edge that is not E must also go to TARGET. */
4796 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4797 return NULL;
4799 i = gsi_last_bb (src);
4800 if (gsi_end_p (i))
4801 return NULL;
4803 stmt = gsi_stmt (i);
/* Delete the branch statement and turn E into a plain fallthru edge
   to TARGET; both outgoing edges now reach the same block.  */
4805 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4807 gsi_remove (&i, true);
4808 e = ssa_redirect_edge (e, target);
4809 e->flags = EDGE_FALLTHRU;
4810 return e;
4813 return NULL;
4817 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4818 edge representing the redirected branch. */
4820 static edge
4821 gimple_redirect_edge_and_branch (edge e, basic_block dest)
4823 basic_block bb = e->src;
4824 gimple_stmt_iterator gsi;
4825 edge ret;
4826 gimple stmt;
/* Abnormal edges cannot be redirected at all; EH edges have their own
   redirection machinery.  */
4828 if (e->flags & EDGE_ABNORMAL)
4829 return NULL;
4831 if (e->dest == dest)
4832 return NULL;
4834 if (e->flags & EDGE_EH)
4835 return redirect_eh_edge (e, dest);
/* First try the cheap route of replacing or removing the jump.  */
4837 if (e->src != ENTRY_BLOCK_PTR)
4839 ret = gimple_try_redirect_by_replacing_jump (e, dest);
4840 if (ret)
4841 return ret;
/* Otherwise adjust the block's final statement to the new target.  */
4844 gsi = gsi_last_bb (bb);
4845 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4847 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
4849 case GIMPLE_COND:
4850 /* For COND_EXPR, we only need to redirect the edge. */
4851 break;
4853 case GIMPLE_GOTO:
4854 /* No non-abnormal edges should lead from a non-simple goto, and
4855 simple ones should be represented implicitly. */
4856 gcc_unreachable ();
4858 case GIMPLE_SWITCH:
/* Retarget every case label that went to E->dest at the (possibly
   new) label of DEST.  */
4860 tree label = gimple_block_label (dest);
4861 tree cases = get_cases_for_edge (e, stmt);
4863 /* If we have a list of cases associated with E, then use it
4864 as it's a lot faster than walking the entire case vector. */
4865 if (cases)
4867 edge e2 = find_edge (e->src, dest);
4868 tree last, first;
4870 first = cases;
4871 while (cases)
4873 last = cases;
4874 CASE_LABEL (cases) = label;
4875 cases = CASE_CHAIN (cases);
4878 /* If there was already an edge in the CFG, then we need
4879 to move all the cases associated with E to E2. */
4880 if (e2)
4882 tree cases2 = get_cases_for_edge (e2, stmt);
4884 CASE_CHAIN (last) = CASE_CHAIN (cases2);
4885 CASE_CHAIN (cases2) = first;
4887 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
4889 else
4891 size_t i, n = gimple_switch_num_labels (stmt);
4893 for (i = 0; i < n; i++)
4895 tree elt = gimple_switch_label (stmt, i);
4896 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4897 CASE_LABEL (elt) = label;
4901 break;
/* For asm gotos rewrite the matching label operands; if none match,
   E must be the fallthrough edge.  */
4903 case GIMPLE_ASM:
4905 int i, n = gimple_asm_nlabels (stmt);
4906 tree label = NULL;
4908 for (i = 0; i < n; ++i)
4910 tree cons = gimple_asm_label_op (stmt, i);
4911 if (label_to_block (TREE_VALUE (cons)) == e->dest)
4913 if (!label)
4914 label = gimple_block_label (dest);
4915 TREE_VALUE (cons) = label;
4919 /* If we didn't find any label matching the former edge in the
4920 asm labels, we must be redirecting the fallthrough
4921 edge. */
4922 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
4924 break;
/* Drop the return statement; the edge becomes a plain fallthru.  */
4926 case GIMPLE_RETURN:
4927 gsi_remove (&gsi, true);
4928 e->flags |= EDGE_FALLTHRU;
4929 break;
4931 case GIMPLE_OMP_RETURN:
4932 case GIMPLE_OMP_CONTINUE:
4933 case GIMPLE_OMP_SECTIONS_SWITCH:
4934 case GIMPLE_OMP_FOR:
4935 /* The edges from OMP constructs can be simply redirected. */
4936 break;
4938 case GIMPLE_EH_DISPATCH:
4939 if (!(e->flags & EDGE_FALLTHRU))
4940 redirect_eh_dispatch_edge (stmt, e, dest);
4941 break;
4943 default:
4944 /* Otherwise it must be a fallthru edge, and we don't need to
4945 do anything besides redirecting it. */
4946 gcc_assert (e->flags & EDGE_FALLTHRU);
4947 break;
4950 /* Update/insert PHI nodes as necessary. */
4952 /* Now update the edges in the CFG. */
4953 e = ssa_redirect_edge (e, dest);
4955 return e;
4958 /* Returns true if it is possible to remove edge E by redirecting
4959 it to the destination of the other edge from E->src. */
4961 static bool
4962 gimple_can_remove_branch_p (const_edge e)
4964 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
4965 return false;
4967 return true;
4970 /* Simple wrapper, as we can always redirect fallthru edges. */
4972 static basic_block
4973 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4975 e = gimple_redirect_edge_and_branch (e, dest);
4976 gcc_assert (e);
4978 return NULL;
4982 /* Splits basic block BB after statement STMT (but at least after the
4983 labels). If STMT is NULL, BB is split just after the labels. */
4985 static basic_block
4986 gimple_split_block (basic_block bb, void *stmt)
4988 gimple_stmt_iterator gsi;
4989 gimple_stmt_iterator gsi_tgt;
4990 gimple act;
4991 gimple_seq list;
4992 basic_block new_bb;
4993 edge e;
4994 edge_iterator ei;
4996 new_bb = create_empty_bb (bb);
4998 /* Redirect the outgoing edges. */
4999 new_bb->succs = bb->succs;
5000 bb->succs = NULL;
5001 FOR_EACH_EDGE (e, ei, new_bb->succs)
5002 e->src = new_bb;
/* Splitting at a label would violate the invariant that labels start a
   block, so fold that case into "split just after the labels".  */
5004 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5005 stmt = NULL;
5007 /* Move everything from GSI to the new basic block. */
5008 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5010 act = gsi_stmt (gsi);
5011 if (gimple_code (act) == GIMPLE_LABEL)
5012 continue;
5014 if (!stmt)
5015 break;
5017 if (stmt == act)
5019 gsi_next (&gsi);
5020 break;
/* Nothing after the split point: NEW_BB stays empty.  */
5024 if (gsi_end_p (gsi))
5025 return new_bb;
5027 /* Split the statement list - avoid re-creating new containers as this
5028 brings ugly quadratic memory consumption in the inliner.
5029 (We are still quadratic since we need to update stmt BB pointers,
5030 sadly.) */
5031 list = gsi_split_seq_before (&gsi);
5032 set_bb_seq (new_bb, list);
5033 for (gsi_tgt = gsi_start (list);
5034 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5035 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5037 return new_bb;
5041 /* Moves basic block BB after block AFTER. */
5043 static bool
5044 gimple_move_block_after (basic_block bb, basic_block after)
5046 if (bb->prev_bb == after)
5047 return true;
5049 unlink_block (bb);
5050 link_block (bb, after);
5052 return true;
5056 /* Return true if basic_block can be duplicated. */
5058 static bool
5059 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5061 return true;
5064 /* Create a duplicate of the basic block BB. NOTE: This does not
5065 preserve SSA form. */
5067 static basic_block
5068 gimple_duplicate_bb (basic_block bb)
5070 basic_block new_bb;
5071 gimple_stmt_iterator gsi, gsi_tgt;
5072 gimple_seq phis = phi_nodes (bb);
5073 gimple phi, stmt, copy;
/* Place the copy at the end of the function, before the exit block.  */
5075 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5077 /* Copy the PHI nodes. We ignore PHI node arguments here because
5078 the incoming edges have not been setup yet. */
5079 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5081 phi = gsi_stmt (gsi);
5082 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5083 create_new_def_for (gimple_phi_result (copy), copy,
5084 gimple_phi_result_ptr (copy));
5087 gsi_tgt = gsi_start_bb (new_bb);
5088 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5090 def_operand_p def_p;
5091 ssa_op_iter op_iter;
5092 tree lhs;
/* Labels are not copied -- the duplicate gets its own label on demand.  */
5094 stmt = gsi_stmt (gsi);
5095 if (gimple_code (stmt) == GIMPLE_LABEL)
5096 continue;
5098 /* Create a new copy of STMT and duplicate STMT's virtual
5099 operands. */
5100 copy = gimple_copy (stmt);
5101 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5103 maybe_duplicate_eh_stmt (copy, stmt);
5104 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5106 /* When copying around a stmt writing into a local non-user
5107 aggregate, make sure it won't share stack slot with other
5108 vars. */
5109 lhs = gimple_get_lhs (stmt);
5110 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5112 tree base = get_base_address (lhs);
5113 if (base
5114 && (TREE_CODE (base) == VAR_DECL
5115 || TREE_CODE (base) == RESULT_DECL)
5116 && DECL_IGNORED_P (base)
5117 && !TREE_STATIC (base)
5118 && !DECL_EXTERNAL (base)
5119 && (TREE_CODE (base) != VAR_DECL
5120 || !DECL_HAS_VALUE_EXPR_P (base)))
5121 DECL_NONSHAREABLE (base) = 1;
5124 /* Create new names for all the definitions created by COPY and
5125 add replacement mappings for each new name. */
5126 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5127 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5130 return new_bb;
5133 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5135 static void
5136 add_phi_args_after_copy_edge (edge e_copy)
5138 basic_block bb, bb_copy = e_copy->src, dest;
5139 edge e;
5140 edge_iterator ei;
5141 gimple phi, phi_copy;
5142 tree def;
5143 gimple_stmt_iterator psi, psi_copy;
/* Nothing to do if the destination has no PHI nodes.  */
5145 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5146 return;
/* Map the copied source/destination blocks back to their originals so
   we can locate the original edge E that E_COPY mirrors.  */
5148 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5150 if (e_copy->dest->flags & BB_DUPLICATED)
5151 dest = get_bb_original (e_copy->dest);
5152 else
5153 dest = e_copy->dest;
5155 e = find_edge (bb, dest);
5156 if (!e)
5158 /* During loop unrolling the target of the latch edge is copied.
5159 In this case we are not looking for edge to dest, but to
5160 duplicated block whose original was dest. */
5161 FOR_EACH_EDGE (e, ei, bb->succs)
5163 if ((e->dest->flags & BB_DUPLICATED)
5164 && get_bb_original (e->dest) == dest)
5165 break;
5168 gcc_assert (e != NULL);
/* Walk the PHIs of the original and copied destinations in lockstep,
   carrying each argument on E over to E_COPY.  */
5171 for (psi = gsi_start_phis (e->dest),
5172 psi_copy = gsi_start_phis (e_copy->dest);
5173 !gsi_end_p (psi);
5174 gsi_next (&psi), gsi_next (&psi_copy))
5176 phi = gsi_stmt (psi);
5177 phi_copy = gsi_stmt (psi_copy);
5178 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5179 add_phi_arg (phi_copy, def, e_copy,
5180 gimple_phi_arg_location_from_edge (phi, e));
5185 /* Basic block BB_COPY was created by code duplication. Add phi node
5186 arguments for edges going out of BB_COPY. The blocks that were
5187 duplicated have BB_DUPLICATED set. */
5189 void
5190 add_phi_args_after_copy_bb (basic_block bb_copy)
5192 edge e_copy;
5193 edge_iterator ei;
5195 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5197 add_phi_args_after_copy_edge (e_copy);
5201 /* Blocks in REGION_COPY array of length N_REGION were created by
5202 duplication of basic blocks. Add phi node arguments for edges
5203 going from these blocks. If E_COPY is not NULL, also add
5204 phi node arguments for its destination.*/
5206 void
5207 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5208 edge e_copy)
5210 unsigned i;
5212 for (i = 0; i < n_region; i++)
5213 region_copy[i]->flags |= BB_DUPLICATED;
5215 for (i = 0; i < n_region; i++)
5216 add_phi_args_after_copy_bb (region_copy[i]);
5217 if (e_copy)
5218 add_phi_args_after_copy_edge (e_copy);
5220 for (i = 0; i < n_region; i++)
5221 region_copy[i]->flags &= ~BB_DUPLICATED;
5224 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5225 important exit edge EXIT. By important we mean that no SSA name defined
5226 inside region is live over the other exit edges of the region. All entry
5227 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5228 to the duplicate of the region. SSA form, dominance and loop information
5229 is updated. The new basic blocks are stored to REGION_COPY in the same
5230 order as they had in REGION, provided that REGION_COPY is not NULL.
5231 The function returns false if it is unable to copy the region,
5232 true otherwise. */
5234 bool
5235 gimple_duplicate_sese_region (edge entry, edge exit,
5236 basic_block *region, unsigned n_region,
5237 basic_block *region_copy)
5239 unsigned i;
5240 bool free_region_copy = false, copying_header = false;
5241 struct loop *loop = entry->dest->loop_father;
5242 edge exit_copy;
5243 VEC (basic_block, heap) *doms;
5244 edge redirected;
5245 int total_freq = 0, entry_freq = 0;
5246 gcov_type total_count = 0, entry_count = 0;
5248 if (!can_copy_bbs_p (region, n_region))
5249 return false;
5251 /* Some sanity checking. Note that we do not check for all possible
5252 misuses of the functions. I.e. if you ask to copy something weird,
5253 it will work, but the state of structures probably will not be
5254 correct. */
5255 for (i = 0; i < n_region; i++)
5257 /* We do not handle subloops, i.e. all the blocks must belong to the
5258 same loop. */
5259 if (region[i]->loop_father != loop)
5260 return false;
5262 if (region[i] != entry->dest
5263 && region[i] == loop->header)
5264 return false;
5267 set_loop_copy (loop, loop);
5269 /* In case the function is used for loop header copying (which is the primary
5270 use), ensure that EXIT and its copy will be new latch and entry edges. */
5271 if (loop->header == entry->dest)
5273 copying_header = true;
5274 set_loop_copy (loop, loop_outer (loop));
5276 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5277 return false;
5279 for (i = 0; i < n_region; i++)
5280 if (region[i] != exit->src
5281 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5282 return false;
5285 if (!region_copy)
5287 region_copy = XNEWVEC (basic_block, n_region);
5288 free_region_copy = true;
5291 gcc_assert (!need_ssa_update_p (cfun));
5293 /* Record blocks outside the region that are dominated by something
5294 inside. */
5295 doms = NULL;
5296 initialize_original_copy_tables ();
5298 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Prefer profile counts when available, otherwise fall back to
   estimated frequencies for the scaling below.  */
5300 if (entry->dest->count)
5302 total_count = entry->dest->count;
5303 entry_count = entry->count;
5304 /* Fix up corner cases, to avoid division by zero or creation of negative
5305 frequencies. */
5306 if (entry_count > total_count)
5307 entry_count = total_count;
5309 else
5311 total_freq = entry->dest->frequency;
5312 entry_freq = EDGE_FREQUENCY (entry);
5313 /* Fix up corner cases, to avoid division by zero or creation of negative
5314 frequencies. */
5315 if (total_freq == 0)
5316 total_freq = 1;
5317 else if (entry_freq > total_freq)
5318 entry_freq = total_freq;
5321 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5322 split_edge_bb_loc (entry));
/* Distribute the execution weight of ENTRY between the original region
   and its copy.  */
5323 if (total_count)
5325 scale_bbs_frequencies_gcov_type (region, n_region,
5326 total_count - entry_count,
5327 total_count);
5328 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5329 total_count);
5331 else
5333 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5334 total_freq);
5335 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5338 if (copying_header)
5340 loop->header = exit->dest;
5341 loop->latch = exit->src;
5344 /* Redirect the entry and add the phi node arguments. */
5345 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5346 gcc_assert (redirected != NULL);
5347 flush_pending_stmts (entry);
5349 /* Concerning updating of dominators: We must recount dominators
5350 for entry block and its copy. Anything that is outside of the
5351 region, but was dominated by something inside needs recounting as
5352 well. */
5353 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5354 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5355 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5356 VEC_free (basic_block, heap, doms);
5358 /* Add the other PHI node arguments. */
5359 add_phi_args_after_copy (region_copy, n_region, NULL);
5361 /* Update the SSA web. */
5362 update_ssa (TODO_update_ssa);
5364 if (free_region_copy)
5365 free (region_copy);
5367 free_original_copy_tables ();
5368 return true;
5371 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5372 are stored to REGION_COPY in the same order in that they appear
5373 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5374 the region, EXIT an exit from it. The condition guarding EXIT
5375 is moved to ENTRY. Returns true if duplication succeeds, false
5376 otherwise.
5378 For example,
5380 some_code;
5381 if (cond)
5383 else
5386 is transformed to
5388 if (cond)
5390 some_code;
5393 else
5395 some_code;
/* NOTE(review): every parameter below is marked ATTRIBUTE_UNUSED yet all
   of them are clearly used in the body -- the markers look stale and
   should probably be removed; confirm against upstream history.  */
5400 bool
5401 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5402 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5403 basic_block *region_copy ATTRIBUTE_UNUSED)
5405 unsigned i;
5406 bool free_region_copy = false;
5407 struct loop *loop = exit->dest->loop_father;
5408 struct loop *orig_loop = entry->dest->loop_father;
5409 basic_block switch_bb, entry_bb, nentry_bb;
5410 VEC (basic_block, heap) *doms;
5411 int total_freq = 0, exit_freq = 0;
5412 gcov_type total_count = 0, exit_count = 0;
5413 edge exits[2], nexits[2], e;
5414 gimple_stmt_iterator gsi,gsi1;
5415 gimple cond_stmt;
5416 edge sorig, snew;
5417 basic_block exit_bb;
5418 basic_block iters_bb;
5419 tree new_rhs;
5420 gimple_stmt_iterator psi;
5421 gimple phi;
5422 tree def;
/* EXIT's source must be a two-way branch; record both outgoing edges,
   EXIT first.  */
5424 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5425 exits[0] = exit;
5426 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5428 if (!can_copy_bbs_p (region, n_region))
5429 return false;
5431 initialize_original_copy_tables ();
5432 set_loop_copy (orig_loop, loop);
5433 duplicate_subloops (orig_loop, loop);
5435 if (!region_copy)
5437 region_copy = XNEWVEC (basic_block, n_region);
5438 free_region_copy = true;
5441 gcc_assert (!need_ssa_update_p (cfun));
5443 /* Record blocks outside the region that are dominated by something
5444 inside. */
5445 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Prefer profile counts when available, otherwise fall back to
   estimated frequencies for the scaling below.  */
5447 if (exit->src->count)
5449 total_count = exit->src->count;
5450 exit_count = exit->count;
5451 /* Fix up corner cases, to avoid division by zero or creation of negative
5452 frequencies. */
5453 if (exit_count > total_count)
5454 exit_count = total_count;
5456 else
5458 total_freq = exit->src->frequency;
5459 exit_freq = EDGE_FREQUENCY (exit);
5460 /* Fix up corner cases, to avoid division by zero or creation of negative
5461 frequencies. */
5462 if (total_freq == 0)
5463 total_freq = 1;
5464 if (exit_freq > total_freq)
5465 exit_freq = total_freq;
5468 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5469 split_edge_bb_loc (exit));
5470 if (total_count)
5472 scale_bbs_frequencies_gcov_type (region, n_region,
5473 total_count - exit_count,
5474 total_count);
5475 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5476 total_count);
5478 else
5480 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5481 total_freq);
5482 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5485 /* Create the switch block, and put the exit condition to it. */
5486 entry_bb = entry->dest;
5487 nentry_bb = get_bb_copy (entry_bb);
5488 if (!last_stmt (entry->src)
5489 || !stmt_ends_bb_p (last_stmt (entry->src)))
5490 switch_bb = entry->src;
5491 else
5492 switch_bb = split_edge (entry);
5493 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5495 gsi = gsi_last_bb (switch_bb);
5496 cond_stmt = last_stmt (exit->src);
5497 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5498 cond_stmt = gimple_copy (cond_stmt);
5500 /* If the block consisting of the exit condition has the latch as
5501 successor, then the body of the loop is executed before
5502 the exit condition is tested. In such case, moving the
5503 condition to the entry, causes that the loop will iterate
5504 one less iteration (which is the wanted outcome, since we
5505 peel out the last iteration). If the body is executed after
5506 the condition, moving the condition to the entry requires
5507 decrementing one iteration. */
5508 if (exits[1]->dest == orig_loop->latch)
5509 new_rhs = gimple_cond_rhs (cond_stmt);
5510 else
5512 new_rhs = fold_build2 (MINUS_EXPR, TREE_TYPE (gimple_cond_rhs (cond_stmt)),
5513 gimple_cond_rhs (cond_stmt),
5514 build_int_cst (TREE_TYPE (gimple_cond_rhs (cond_stmt)), 1));
/* Materialize the decremented bound right after the definition of the
   original bound, so it dominates the new use in SWITCH_BB.  */
5516 if (TREE_CODE (gimple_cond_rhs (cond_stmt)) == SSA_NAME)
5518 iters_bb = gimple_bb (SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)));
5519 for (gsi1 = gsi_start_bb (iters_bb); !gsi_end_p (gsi1); gsi_next (&gsi1))
5520 if (gsi_stmt (gsi1) == SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)))
5521 break;
5523 new_rhs = force_gimple_operand_gsi (&gsi1, new_rhs, true,
5524 NULL_TREE,false,GSI_CONTINUE_LINKING);
5527 gimple_cond_set_rhs (cond_stmt, unshare_expr (new_rhs));
5528 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5529 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5531 sorig = single_succ_edge (switch_bb);
5532 sorig->flags = exits[1]->flags;
5533 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5535 /* Register the new edge from SWITCH_BB in loop exit lists. */
5536 rescan_loop_exit (snew, true, false);
5538 /* Add the PHI node arguments. */
5539 add_phi_args_after_copy (region_copy, n_region, snew);
5541 /* Get rid of now superfluous conditions and associated edges (and phi node
5542 arguments). */
5543 exit_bb = exit->dest;
5545 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5546 PENDING_STMT (e) = NULL;
5548 /* The latch of ORIG_LOOP was copied, and so was the backedge
5549 to the original header. We redirect this backedge to EXIT_BB. */
5550 for (i = 0; i < n_region; i++)
5551 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5553 gcc_assert (single_succ_edge (region_copy[i]));
5554 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5555 PENDING_STMT (e) = NULL;
5556 for (psi = gsi_start_phis (exit_bb);
5557 !gsi_end_p (psi);
5558 gsi_next (&psi))
5560 phi = gsi_stmt (psi);
5561 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5562 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5565 e = redirect_edge_and_branch (nexits[0], nexits[1]->dest);
5566 PENDING_STMT (e) = NULL;
5568 /* Anything that is outside of the region, but was dominated by something
5569 inside needs to update dominance info. */
5570 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5571 VEC_free (basic_block, heap, doms);
5572 /* Update the SSA web. */
5573 update_ssa (TODO_update_ssa);
5575 if (free_region_copy)
5576 free (region_copy);
5578 free_original_copy_tables ();
5579 return true;
5582 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5583 adding blocks when the dominator traversal reaches EXIT. This
5584 function silently assumes that ENTRY strictly dominates EXIT. */
5586 void
5587 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5588 VEC(basic_block,heap) **bbs_p)
5590 basic_block son;
5592 for (son = first_dom_son (CDI_DOMINATORS, entry);
5593 son;
5594 son = next_dom_son (CDI_DOMINATORS, son))
5596 VEC_safe_push (basic_block, heap, *bbs_p, son);
5597 if (son != exit)
5598 gather_blocks_in_sese_region (son, exit, bbs_p);
5602 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5603 The duplicates are recorded in VARS_MAP. */
5605 static void
5606 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5607 tree to_context)
5609 tree t = *tp, new_t;
5610 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5611 void **loc;
/* Already belongs to the destination function: nothing to do.  */
5613 if (DECL_CONTEXT (t) == to_context)
5614 return;
5616 loc = pointer_map_contains (vars_map, t);
/* First time we see T: create the duplicate and memoize it so every
   later reference maps to the same copy.  */
5618 if (!loc)
5620 loc = pointer_map_insert (vars_map, t);
5622 if (SSA_VAR_P (t))
5624 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5625 add_local_decl (f, new_t);
5627 else
5629 gcc_assert (TREE_CODE (t) == CONST_DECL);
5630 new_t = copy_node (t);
5632 DECL_CONTEXT (new_t) = to_context;
5634 *loc = new_t;
5636 else
5637 new_t = (tree) *loc;
5639 *tp = new_t;
5643 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5644 VARS_MAP maps old ssa names and var_decls to the new ones. */
5646 static tree
5647 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5648 tree to_context)
5650 void **loc;
5651 tree new_name, decl = SSA_NAME_VAR (name);
/* Virtual operands are not moved; only real (register) SSA names.  */
5653 gcc_assert (is_gimple_reg (name));
5655 loc = pointer_map_contains (vars_map, name);
5657 if (!loc)
/* Duplicate the underlying decl first, then mint the SSA name while
   CFUN is temporarily switched to the destination function.  */
5659 replace_by_duplicate_decl (&decl, vars_map, to_context);
5661 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5662 if (gimple_in_ssa_p (cfun))
5663 add_referenced_var (decl);
5665 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5666 if (SSA_NAME_IS_DEFAULT_DEF (name))
5667 set_default_def (decl, new_name);
5668 pop_cfun ();
5670 loc = pointer_map_insert (vars_map, name);
5671 *loc = new_name;
5673 else
5674 new_name = (tree) *loc;
5676 return new_name;
/* Shared context for move_stmt_r/move_stmt_op while moving statements
   from one function to another.  */
5679 struct move_stmt_d
/* Lexical block being replaced, or NULL_TREE to rewrite all blocks.  */
5681 tree orig_block;
/* Replacement lexical block in the destination function.  */
5682 tree new_block;
/* FUNCTION_DECLs of the source and destination functions.  */
5683 tree from_context;
5684 tree to_context;
/* Memoizes duplicated decls and SSA names (see replace_by_duplicate_decl).  */
5685 struct pointer_map_t *vars_map;
/* Maps old labels to fresh labels (see new_label_mapper).  */
5686 htab_t new_label_map;
/* Maps source EH regions to their duplicates in the destination.  */
5687 struct pointer_map_t *eh_map;
/* Whether decls should be remapped; cleared inside OMP directive
   bodies (see move_stmt_r).  */
5688 bool remap_decls_p;
5691 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5692 contained in *TP if it has been ORIG_BLOCK previously and change the
5693 DECL_CONTEXT of every local variable referenced in *TP. */
5695 static tree
5696 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5698 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5699 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5700 tree t = *tp;
5702 if (EXPR_P (t))
5703 /* We should never have TREE_BLOCK set on non-statements. */
5704 gcc_assert (!TREE_BLOCK (t));
5706 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5708 if (TREE_CODE (t) == SSA_NAME)
5709 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5710 else if (TREE_CODE (t) == LABEL_DECL)
/* Labels keep their identity unless a mapping was recorded; either
   way their context moves to the destination function.  */
5712 if (p->new_label_map)
5714 struct tree_map in, *out;
5715 in.base.from = t;
5716 out = (struct tree_map *)
5717 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5718 if (out)
5719 *tp = t = out->to;
5722 DECL_CONTEXT (t) = p->to_context;
5724 else if (p->remap_decls_p)
5726 /* Replace T with its duplicate. T should no longer appear in the
5727 parent function, so this looks wasteful; however, it may appear
5728 in referenced_vars, and more importantly, as virtual operands of
5729 statements, and in alias lists of other variables. It would be
5730 quite difficult to expunge it from all those places. ??? It might
5731 suffice to do this for addressable variables. */
5732 if ((TREE_CODE (t) == VAR_DECL
5733 && !is_global_var (t))
5734 || TREE_CODE (t) == CONST_DECL)
5735 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5737 if (SSA_VAR_P (t)
5738 && gimple_in_ssa_p (cfun))
5740 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5741 add_referenced_var (*tp);
5742 pop_cfun ();
5745 *walk_subtrees = 0;
5747 else if (TYPE_P (t))
5748 *walk_subtrees = 0;
5750 return NULL_TREE;
5753 /* Helper for move_stmt_r. Given an EH region number for the source
5754 function, map that to the duplicate EH region number in the dest. */
5756 static int
5757 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5759 eh_region old_r, new_r;
5760 void **slot;
/* The lookup is unchecked: EH_MAP is expected to contain every region
   referenced by the moved statements.  */
5762 old_r = get_eh_region_from_number (old_nr);
5763 slot = pointer_map_contains (p->eh_map, old_r);
5764 new_r = (eh_region) *slot;
5766 return new_r->index;
5769 /* Similar, but operate on INTEGER_CSTs: decode the region number from
   OLD_T_NR, remap it, and return it re-encoded as an INTEGER_CST.  */
5771 static tree
5772 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
5774 int old_nr, new_nr;
5776 old_nr = tree_low_cst (old_t_nr, 0);
5777 new_nr = move_stmt_eh_region_nr (old_nr, p);
5779 return build_int_cst (integer_type_node, new_nr);
5782 /* Like move_stmt_op, but for gimple statements.
5784 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5785 contained in the current statement in *GSI_P and change the
5786 DECL_CONTEXT of every local variable referenced in the current
5787 statement. */
5789 static tree
5790 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5791 struct walk_stmt_info *wi)
5793 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5794 gimple stmt = gsi_stmt (*gsi_p);
5795 tree block = gimple_block (stmt);
5797 if (p->orig_block == NULL_TREE
5798 || block == p->orig_block
5799 || block == NULL_TREE)
5800 gimple_set_block (stmt, p->new_block);
5801 #ifdef ENABLE_CHECKING
/* With checking enabled, verify that any other block is nested inside
   ORIG_BLOCK.  */
5802 else if (block != p->new_block)
5804 while (block && block != p->orig_block)
5805 block = BLOCK_SUPERCONTEXT (block);
5806 gcc_assert (block);
5808 #endif
/* Statement-level fixups: remap EH region numbers embedded in calls,
   resx and eh_dispatch, and handle OMP bodies specially.  */
5810 switch (gimple_code (stmt))
5812 case GIMPLE_CALL:
5813 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
5815 tree r, fndecl = gimple_call_fndecl (stmt);
5816 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5817 switch (DECL_FUNCTION_CODE (fndecl))
5819 case BUILT_IN_EH_COPY_VALUES:
5820 r = gimple_call_arg (stmt, 1);
5821 r = move_stmt_eh_region_tree_nr (r, p);
5822 gimple_call_set_arg (stmt, 1, r);
5823 /* FALLTHRU */
5825 case BUILT_IN_EH_POINTER:
5826 case BUILT_IN_EH_FILTER:
5827 r = gimple_call_arg (stmt, 0);
5828 r = move_stmt_eh_region_tree_nr (r, p);
5829 gimple_call_set_arg (stmt, 0, r);
5830 break;
5832 default:
5833 break;
5836 break;
5838 case GIMPLE_RESX:
5840 int r = gimple_resx_region (stmt);
5841 r = move_stmt_eh_region_nr (r, p);
5842 gimple_resx_set_region (stmt, r);
5844 break;
5846 case GIMPLE_EH_DISPATCH:
5848 int r = gimple_eh_dispatch_region (stmt);
5849 r = move_stmt_eh_region_nr (r, p);
5850 gimple_eh_dispatch_set_region (stmt, r);
5852 break;
5854 case GIMPLE_OMP_RETURN:
5855 case GIMPLE_OMP_CONTINUE:
5856 break;
5857 default:
5858 if (is_gimple_omp (stmt))
5860 /* Do not remap variables inside OMP directives. Variables
5861 referenced in clauses and directive header belong to the
5862 parent function and should not be moved into the child
5863 function. */
5864 bool save_remap_decls_p = p->remap_decls_p;
5865 p->remap_decls_p = false;
5866 *handled_ops_p = true;
5868 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r,
5869 move_stmt_op, wi);
5871 p->remap_decls_p = save_remap_decls_p;
5873 break;
5876 return NULL_TREE;
5879 /* Move basic block BB from function CFUN to function DEST_FN. The
5880 block is moved out of the original linked list and placed after
5881 block AFTER in the new list. Also, the block is removed from the
5882 original array of blocks and placed in DEST_FN's array of blocks.
5883 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5884 updated to reflect the moved edges.
5886 The local variables are remapped to new instances, VARS_MAP is used
5887 to record the mapping. */
5889 static void
5890 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5891 basic_block after, bool update_edge_count_p,
5892 struct move_stmt_d *d)
5894 struct control_flow_graph *cfg;
5895 edge_iterator ei;
5896 edge e;
5897 gimple_stmt_iterator si;
5898 unsigned old_len, new_len;
5900 /* Remove BB from dominance structures. */
5901 delete_from_dominance_info (CDI_DOMINATORS, bb);
5902 if (current_loops)
5903 remove_bb_from_loops (bb);
5905 /* Link BB to the new linked list. */
5906 move_block_after (bb, after);
5908 /* Update the edge count in the corresponding flowgraphs. */
5909 if (update_edge_count_p)
5910 FOR_EACH_EDGE (e, ei, bb->succs)
5912 cfun->cfg->x_n_edges--;
5913 dest_cfun->cfg->x_n_edges++;
5916 /* Remove BB from the original basic block array. */
5917 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5918 cfun->cfg->x_n_basic_blocks--;
5920 /* Grow DEST_CFUN's basic block array if needed. */
5921 cfg = dest_cfun->cfg;
5922 cfg->x_n_basic_blocks++;
5923 if (bb->index >= cfg->x_last_basic_block)
5924 cfg->x_last_basic_block = bb->index + 1;
5926 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5927 if ((unsigned) cfg->x_last_basic_block >= old_len)
/* Grow by roughly 25% headroom to amortize repeated moves.  */
5929 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5930 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5931 new_len);
5934 VEC_replace (basic_block, cfg->x_basic_block_info,
5935 bb->index, bb);
5937 /* Remap the variables in phi nodes. */
5938 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
5940 gimple phi = gsi_stmt (si);
5941 use_operand_p use;
5942 tree op = PHI_RESULT (phi);
5943 ssa_op_iter oi;
5945 if (!is_gimple_reg (op))
5947 /* Remove the phi nodes for virtual operands (alias analysis will be
5948 run for the new function, anyway). */
5949 remove_phi_node (&si, true);
5950 continue;
5953 SET_PHI_RESULT (phi,
5954 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5955 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5957 op = USE_FROM_PTR (use);
5958 if (TREE_CODE (op) == SSA_NAME)
5959 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5962 gsi_next (&si);
/* Remap every ordinary statement, fix label bookkeeping, move EH and
   histogram data, and re-canonicalize operands in the new function.  */
5965 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5967 gimple stmt = gsi_stmt (si);
5968 struct walk_stmt_info wi;
5970 memset (&wi, 0, sizeof (wi));
5971 wi.info = d;
5972 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
5974 if (gimple_code (stmt) == GIMPLE_LABEL)
5976 tree label = gimple_label_label (stmt);
5977 int uid = LABEL_DECL_UID (label);
5979 gcc_assert (uid > -1);
5981 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5982 if (old_len <= (unsigned) uid)
5984 new_len = 3 * uid / 2 + 1;
5985 VEC_safe_grow_cleared (basic_block, gc,
5986 cfg->x_label_to_block_map, new_len);
5989 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5990 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5992 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5994 if (uid >= dest_cfun->cfg->last_label_uid)
5995 dest_cfun->cfg->last_label_uid = uid + 1;
5998 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
5999 remove_stmt_from_eh_lp_fn (cfun, stmt);
6001 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6002 gimple_remove_stmt_histograms (cfun, stmt);
6004 /* We cannot leave any operands allocated from the operand caches of
6005 the current function. */
6006 free_stmt_operands (stmt);
6007 push_cfun (dest_cfun);
6008 update_stmt (stmt);
6009 pop_cfun ();
/* Also migrate the goto_block of outgoing edges, mirroring the
   GIMPLE_BLOCK handling in move_stmt_r.  */
6012 FOR_EACH_EDGE (e, ei, bb->succs)
6013 if (e->goto_locus)
6015 tree block = e->goto_block;
6016 if (d->orig_block == NULL_TREE
6017 || block == d->orig_block)
6018 e->goto_block = d->new_block;
6019 #ifdef ENABLE_CHECKING
6020 else if (block != d->new_block)
6022 while (block && block != d->orig_block)
6023 block = BLOCK_SUPERCONTEXT (block);
6024 gcc_assert (block);
6026 #endif
6030 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6031 the outermost EH region. Use REGION as the incoming base EH region. */
6033 static eh_region
6034 find_outermost_region_in_block (struct function *src_cfun,
6035 basic_block bb, eh_region region)
6037 gimple_stmt_iterator si;
6039 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6041 gimple stmt = gsi_stmt (si);
6042 eh_region stmt_region;
6043 int lp_nr;
6045 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6046 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6047 if (stmt_region)
6049 if (region == NULL)
6050 region = stmt_region;
6051 else if (stmt_region != region)
/* Widen REGION to the common enclosing region; the statements must
   share some ancestor region.  */
6053 region = eh_region_outermost (src_cfun, stmt_region, region);
6054 gcc_assert (region != NULL);
6059 return region;
6062 static tree
6063 new_label_mapper (tree decl, void *data)
6065 htab_t hash = (htab_t) data;
6066 struct tree_map *m;
6067 void **slot;
6069 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6071 m = XNEW (struct tree_map);
6072 m->hash = DECL_UID (decl);
6073 m->base.from = decl;
6074 m->to = create_artificial_label (UNKNOWN_LOCATION);
6075 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6076 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6077 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6079 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6080 gcc_assert (*slot == NULL);
6082 *slot = m;
6084 return m->to;
6087 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6088 subblocks. */
6090 static void
6091 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6092 tree to_context)
6094 tree *tp, t;
6096 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6098 t = *tp;
6099 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6100 continue;
6101 replace_by_duplicate_decl (&t, vars_map, to_context);
6102 if (t != *tp)
6104 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6106 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6107 DECL_HAS_VALUE_EXPR_P (t) = 1;
6109 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6110 *tp = t;
6114 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6115 replace_block_vars_by_duplicates (block, vars_map, to_context);
6118 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6119 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6120 single basic block in the original CFG and the new basic block is
6121 returned. DEST_CFUN must not have a CFG yet.
6123 Note that the region need not be a pure SESE region. Blocks inside
6124 the region may contain calls to abort/exit. The only restriction
6125 is that ENTRY_BB should be the only entry point and it must
6126 dominate EXIT_BB.
6128 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6129 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6130 to the new function.
6132 All local variables referenced in the region are assumed to be in
6133 the corresponding BLOCK_VARS and unexpanded variable lists
6134 associated with DEST_CFUN. */
6136 basic_block
6137 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6138 basic_block exit_bb, tree orig_block)
6140 VEC(basic_block,heap) *bbs, *dom_bbs;
6141 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6142 basic_block after, bb, *entry_pred, *exit_succ, abb;
6143 struct function *saved_cfun = cfun;
6144 int *entry_flag, *exit_flag;
6145 unsigned *entry_prob, *exit_prob;
6146 unsigned i, num_entry_edges, num_exit_edges;
6147 edge e;
6148 edge_iterator ei;
6149 htab_t new_label_map;
6150 struct pointer_map_t *vars_map, *eh_map;
6151 struct loop *loop = entry_bb->loop_father;
6152 struct move_stmt_d d;
6154 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6155 region. */
6156 gcc_assert (entry_bb != exit_bb
6157 && (!exit_bb
6158 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6160 /* Collect all the blocks in the region. Manually add ENTRY_BB
6161 because it won't be added by dfs_enumerate_from. */
6162 bbs = NULL;
6163 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6164 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6166 /* The blocks that used to be dominated by something in BBS will now be
6167 dominated by the new block. */
6168 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6169 VEC_address (basic_block, bbs),
6170 VEC_length (basic_block, bbs));
6172 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6173 the predecessor edges to ENTRY_BB and the successor edges to
6174 EXIT_BB so that we can re-attach them to the new basic block that
6175 will replace the region. */
6176 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6177 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6178 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6179 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6180 i = 0;
6181 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6183 entry_prob[i] = e->probability;
6184 entry_flag[i] = e->flags;
6185 entry_pred[i++] = e->src;
6186 remove_edge (e);
6189 if (exit_bb)
6191 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6192 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6193 sizeof (basic_block));
6194 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6195 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6196 i = 0;
6197 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6199 exit_prob[i] = e->probability;
6200 exit_flag[i] = e->flags;
6201 exit_succ[i++] = e->dest;
6202 remove_edge (e);
6205 else
6207 num_exit_edges = 0;
6208 exit_succ = NULL;
6209 exit_flag = NULL;
6210 exit_prob = NULL;
6213 /* Switch context to the child function to initialize DEST_FN's CFG. */
6214 gcc_assert (dest_cfun->cfg == NULL);
6215 push_cfun (dest_cfun);
6217 init_empty_tree_cfg ();
6219 /* Initialize EH information for the new function. */
6220 eh_map = NULL;
6221 new_label_map = NULL;
6222 if (saved_cfun->eh)
6224 eh_region region = NULL;
6226 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6227 region = find_outermost_region_in_block (saved_cfun, bb, region);
6229 init_eh_for_function ();
6230 if (region != NULL)
6232 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6233 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6234 new_label_mapper, new_label_map);
6238 pop_cfun ();
6240 /* Move blocks from BBS into DEST_CFUN. */
6241 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6242 after = dest_cfun->cfg->x_entry_block_ptr;
6243 vars_map = pointer_map_create ();
6245 memset (&d, 0, sizeof (d));
6246 d.orig_block = orig_block;
6247 d.new_block = DECL_INITIAL (dest_cfun->decl);
6248 d.from_context = cfun->decl;
6249 d.to_context = dest_cfun->decl;
6250 d.vars_map = vars_map;
6251 d.new_label_map = new_label_map;
6252 d.eh_map = eh_map;
6253 d.remap_decls_p = true;
6255 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6257 /* No need to update edge counts on the last block. It has
6258 already been updated earlier when we detached the region from
6259 the original CFG. */
6260 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6261 after = bb;
6264 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6265 if (orig_block)
6267 tree block;
6268 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6269 == NULL_TREE);
6270 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6271 = BLOCK_SUBBLOCKS (orig_block);
6272 for (block = BLOCK_SUBBLOCKS (orig_block);
6273 block; block = BLOCK_CHAIN (block))
6274 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6275 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6278 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6279 vars_map, dest_cfun->decl);
6281 if (new_label_map)
6282 htab_delete (new_label_map);
6283 if (eh_map)
6284 pointer_map_destroy (eh_map);
6285 pointer_map_destroy (vars_map);
6287 /* Rewire the entry and exit blocks. The successor to the entry
6288 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6289 the child function. Similarly, the predecessor of DEST_FN's
6290 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6291 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6292 various CFG manipulation function get to the right CFG.
6294 FIXME, this is silly. The CFG ought to become a parameter to
6295 these helpers. */
6296 push_cfun (dest_cfun);
6297 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6298 if (exit_bb)
6299 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6300 pop_cfun ();
6302 /* Back in the original function, the SESE region has disappeared,
6303 create a new basic block in its place. */
6304 bb = create_empty_bb (entry_pred[0]);
6305 if (current_loops)
6306 add_bb_to_loop (bb, loop);
6307 for (i = 0; i < num_entry_edges; i++)
6309 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6310 e->probability = entry_prob[i];
6313 for (i = 0; i < num_exit_edges; i++)
6315 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6316 e->probability = exit_prob[i];
6319 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6320 FOR_EACH_VEC_ELT (basic_block, dom_bbs, i, abb)
6321 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6322 VEC_free (basic_block, heap, dom_bbs);
6324 if (exit_bb)
6326 free (exit_prob);
6327 free (exit_flag);
6328 free (exit_succ);
6330 free (entry_prob);
6331 free (entry_flag);
6332 free (entry_pred);
6333 VEC_free (basic_block, heap, bbs);
6335 return bb;
6339 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree-pass.h)
6342 void
6343 dump_function_to_file (tree fn, FILE *file, int flags)
6345 tree arg, var;
6346 struct function *dsf;
6347 bool ignore_topmost_bind = false, any_var = false;
6348 basic_block bb;
6349 tree chain;
6351 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6353 arg = DECL_ARGUMENTS (fn);
6354 while (arg)
6356 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6357 fprintf (file, " ");
6358 print_generic_expr (file, arg, dump_flags);
6359 if (flags & TDF_VERBOSE)
6360 print_node (file, "", arg, 4);
6361 if (DECL_CHAIN (arg))
6362 fprintf (file, ", ");
6363 arg = DECL_CHAIN (arg);
6365 fprintf (file, ")\n");
6367 if (flags & TDF_VERBOSE)
6368 print_node (file, "", fn, 2);
6370 dsf = DECL_STRUCT_FUNCTION (fn);
6371 if (dsf && (flags & TDF_EH))
6372 dump_eh_tree (file, dsf);
6374 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6376 dump_node (fn, TDF_SLIM | flags, file);
6377 return;
6380 /* Switch CFUN to point to FN. */
6381 push_cfun (DECL_STRUCT_FUNCTION (fn));
6383 /* When GIMPLE is lowered, the variables are no longer available in
6384 BIND_EXPRs, so display them separately. */
6385 if (cfun && cfun->decl == fn && !VEC_empty (tree, cfun->local_decls))
6387 unsigned ix;
6388 ignore_topmost_bind = true;
6390 fprintf (file, "{\n");
6391 FOR_EACH_LOCAL_DECL (cfun, ix, var)
6393 print_generic_decl (file, var, flags);
6394 if (flags & TDF_VERBOSE)
6395 print_node (file, "", var, 4);
6396 fprintf (file, "\n");
6398 any_var = true;
6402 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6404 /* If the CFG has been built, emit a CFG-based dump. */
6405 check_bb_profile (ENTRY_BLOCK_PTR, file);
6406 if (!ignore_topmost_bind)
6407 fprintf (file, "{\n");
6409 if (any_var && n_basic_blocks)
6410 fprintf (file, "\n");
6412 FOR_EACH_BB (bb)
6413 gimple_dump_bb (bb, file, 2, flags);
6415 fprintf (file, "}\n");
6416 check_bb_profile (EXIT_BLOCK_PTR, file);
6418 else if (DECL_SAVED_TREE (fn) == NULL)
6420 /* The function is now in GIMPLE form but the CFG has not been
6421 built yet. Emit the single sequence of GIMPLE statements
6422 that make up its body. */
6423 gimple_seq body = gimple_body (fn);
6425 if (gimple_seq_first_stmt (body)
6426 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6427 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6428 print_gimple_seq (file, body, 0, flags);
6429 else
6431 if (!ignore_topmost_bind)
6432 fprintf (file, "{\n");
6434 if (any_var)
6435 fprintf (file, "\n");
6437 print_gimple_seq (file, body, 2, flags);
6438 fprintf (file, "}\n");
6441 else
6443 int indent;
6445 /* Make a tree based dump. */
6446 chain = DECL_SAVED_TREE (fn);
6448 if (chain && TREE_CODE (chain) == BIND_EXPR)
6450 if (ignore_topmost_bind)
6452 chain = BIND_EXPR_BODY (chain);
6453 indent = 2;
6455 else
6456 indent = 0;
6458 else
6460 if (!ignore_topmost_bind)
6461 fprintf (file, "{\n");
6462 indent = 2;
6465 if (any_var)
6466 fprintf (file, "\n");
6468 print_generic_stmt_indented (file, chain, flags, indent);
6469 if (ignore_topmost_bind)
6470 fprintf (file, "}\n");
6473 if (flags & TDF_ENUMERATE_LOCALS)
6474 dump_enumerated_decls (file, flags);
6475 fprintf (file, "\n\n");
6477 /* Restore CFUN. */
6478 pop_cfun ();
6482 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6484 DEBUG_FUNCTION void
6485 debug_function (tree fn, int flags)
6487 dump_function_to_file (fn, stderr, flags);
6491 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6493 static void
6494 print_pred_bbs (FILE *file, basic_block bb)
6496 edge e;
6497 edge_iterator ei;
6499 FOR_EACH_EDGE (e, ei, bb->preds)
6500 fprintf (file, "bb_%d ", e->src->index);
6504 /* Print on FILE the indexes for the successors of basic_block BB. */
6506 static void
6507 print_succ_bbs (FILE *file, basic_block bb)
6509 edge e;
6510 edge_iterator ei;
6512 FOR_EACH_EDGE (e, ei, bb->succs)
6513 fprintf (file, "bb_%d ", e->dest->index);
6516 /* Print to FILE the basic block BB following the VERBOSITY level. */
6518 void
6519 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6521 char *s_indent = (char *) alloca ((size_t) indent + 1);
6522 memset ((void *) s_indent, ' ', (size_t) indent);
6523 s_indent[indent] = '\0';
6525 /* Print basic_block's header. */
6526 if (verbosity >= 2)
6528 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6529 print_pred_bbs (file, bb);
6530 fprintf (file, "}, succs = {");
6531 print_succ_bbs (file, bb);
6532 fprintf (file, "})\n");
6535 /* Print basic_block's body. */
6536 if (verbosity >= 3)
6538 fprintf (file, "%s {\n", s_indent);
6539 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6540 fprintf (file, "%s }\n", s_indent);
6544 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6546 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
6547 VERBOSITY level this outputs the contents of the loop, or just its
6548 structure. */
6550 static void
6551 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6553 char *s_indent;
6554 basic_block bb;
6556 if (loop == NULL)
6557 return;
6559 s_indent = (char *) alloca ((size_t) indent + 1);
6560 memset ((void *) s_indent, ' ', (size_t) indent);
6561 s_indent[indent] = '\0';
6563 /* Print loop's header. */
6564 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6565 loop->num, loop->header->index, loop->latch->index);
6566 fprintf (file, ", niter = ");
6567 print_generic_expr (file, loop->nb_iterations, 0);
6569 if (loop->any_upper_bound)
6571 fprintf (file, ", upper_bound = ");
6572 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6575 if (loop->any_estimate)
6577 fprintf (file, ", estimate = ");
6578 dump_double_int (file, loop->nb_iterations_estimate, true);
6580 fprintf (file, ")\n");
6582 /* Print loop's body. */
6583 if (verbosity >= 1)
6585 fprintf (file, "%s{\n", s_indent);
6586 FOR_EACH_BB (bb)
6587 if (bb->loop_father == loop)
6588 print_loops_bb (file, bb, indent, verbosity);
6590 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6591 fprintf (file, "%s}\n", s_indent);
6595 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6596 spaces. Following VERBOSITY level this outputs the contents of the
6597 loop, or just its structure. */
6599 static void
6600 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6602 if (loop == NULL)
6603 return;
6605 print_loop (file, loop, indent, verbosity);
6606 print_loop_and_siblings (file, loop->next, indent, verbosity);
6609 /* Follow a CFG edge from the entry point of the program, and on entry
6610 of a loop, pretty print the loop structure on FILE. */
6612 void
6613 print_loops (FILE *file, int verbosity)
6615 basic_block bb;
6617 bb = ENTRY_BLOCK_PTR;
6618 if (bb && bb->loop_father)
6619 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6623 /* Debugging loops structure at tree level, at some VERBOSITY level. */
6625 DEBUG_FUNCTION void
6626 debug_loops (int verbosity)
6628 print_loops (stderr, verbosity);
6631 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6633 DEBUG_FUNCTION void
6634 debug_loop (struct loop *loop, int verbosity)
6636 print_loop (stderr, loop, 0, verbosity);
6639 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6640 level. */
6642 DEBUG_FUNCTION void
6643 debug_loop_num (unsigned num, int verbosity)
6645 debug_loop (get_loop (num), verbosity);
6648 /* Return true if BB ends with a call, possibly followed by some
6649 instructions that must stay with the call. Return false,
6650 otherwise. */
6652 static bool
6653 gimple_block_ends_with_call_p (basic_block bb)
6655 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6656 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6660 /* Return true if BB ends with a conditional branch. Return false,
6661 otherwise. */
6663 static bool
6664 gimple_block_ends_with_condjump_p (const_basic_block bb)
6666 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6667 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6671 /* Return true if we need to add fake edge to exit at statement T.
6672 Helper function for gimple_flow_call_edges_add. */
6674 static bool
6675 need_fake_edge_p (gimple t)
6677 tree fndecl = NULL_TREE;
6678 int call_flags = 0;
6680 /* NORETURN and LONGJMP calls already have an edge to exit.
6681 CONST and PURE calls do not need one.
6682 We don't currently check for CONST and PURE here, although
6683 it would be a good idea, because those attributes are
6684 figured out from the RTL in mark_constant_function, and
6685 the counter incrementation code from -fprofile-arcs
6686 leads to different results from -fbranch-probabilities. */
6687 if (is_gimple_call (t))
6689 fndecl = gimple_call_fndecl (t);
6690 call_flags = gimple_call_flags (t);
6693 if (is_gimple_call (t)
6694 && fndecl
6695 && DECL_BUILT_IN (fndecl)
6696 && (call_flags & ECF_NOTHROW)
6697 && !(call_flags & ECF_RETURNS_TWICE)
6698 /* fork() doesn't really return twice, but the effect of
6699 wrapping it in __gcov_fork() which calls __gcov_flush()
6700 and clears the counters before forking has the same
6701 effect as returning twice. Force a fake edge. */
6702 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6703 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6704 return false;
6706 if (is_gimple_call (t)
6707 && !(call_flags & ECF_NORETURN))
6708 return true;
6710 if (gimple_code (t) == GIMPLE_ASM
6711 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6712 return true;
6714 return false;
6718 /* Add fake edges to the function exit for any non constant and non
6719 noreturn calls, volatile inline assembly in the bitmap of blocks
6720 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6721 the number of blocks that were split.
6723 The goal is to expose cases in which entering a basic block does
6724 not imply that all subsequent instructions must be executed. */
6726 static int
6727 gimple_flow_call_edges_add (sbitmap blocks)
6729 int i;
6730 int blocks_split = 0;
6731 int last_bb = last_basic_block;
6732 bool check_last_block = false;
6734 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6735 return 0;
6737 if (! blocks)
6738 check_last_block = true;
6739 else
6740 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6742 /* In the last basic block, before epilogue generation, there will be
6743 a fallthru edge to EXIT. Special care is required if the last insn
6744 of the last basic block is a call because make_edge folds duplicate
6745 edges, which would result in the fallthru edge also being marked
6746 fake, which would result in the fallthru edge being removed by
6747 remove_fake_edges, which would result in an invalid CFG.
6749 Moreover, we can't elide the outgoing fake edge, since the block
6750 profiler needs to take this into account in order to solve the minimal
6751 spanning tree in the case that the call doesn't return.
6753 Handle this by adding a dummy instruction in a new last basic block. */
6754 if (check_last_block)
6756 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6757 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6758 gimple t = NULL;
6760 if (!gsi_end_p (gsi))
6761 t = gsi_stmt (gsi);
6763 if (t && need_fake_edge_p (t))
6765 edge e;
6767 e = find_edge (bb, EXIT_BLOCK_PTR);
6768 if (e)
6770 gsi_insert_on_edge (e, gimple_build_nop ());
6771 gsi_commit_edge_inserts ();
6776 /* Now add fake edges to the function exit for any non constant
6777 calls since there is no way that we can determine if they will
6778 return or not... */
6779 for (i = 0; i < last_bb; i++)
6781 basic_block bb = BASIC_BLOCK (i);
6782 gimple_stmt_iterator gsi;
6783 gimple stmt, last_stmt;
6785 if (!bb)
6786 continue;
6788 if (blocks && !TEST_BIT (blocks, i))
6789 continue;
6791 gsi = gsi_last_nondebug_bb (bb);
6792 if (!gsi_end_p (gsi))
6794 last_stmt = gsi_stmt (gsi);
6797 stmt = gsi_stmt (gsi);
6798 if (need_fake_edge_p (stmt))
6800 edge e;
6802 /* The handling above of the final block before the
6803 epilogue should be enough to verify that there is
6804 no edge to the exit block in CFG already.
6805 Calling make_edge in such case would cause us to
6806 mark that edge as fake and remove it later. */
6807 #ifdef ENABLE_CHECKING
6808 if (stmt == last_stmt)
6810 e = find_edge (bb, EXIT_BLOCK_PTR);
6811 gcc_assert (e == NULL);
6813 #endif
6815 /* Note that the following may create a new basic block
6816 and renumber the existing basic blocks. */
6817 if (stmt != last_stmt)
6819 e = split_block (bb, stmt);
6820 if (e)
6821 blocks_split++;
6823 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6825 gsi_prev (&gsi);
6827 while (!gsi_end_p (gsi));
6831 if (blocks_split)
6832 verify_flow_info ();
6834 return blocks_split;
6837 /* Removes edge E and all the blocks dominated by it, and updates dominance
6838 information. The IL in E->src needs to be updated separately.
6839 If dominance info is not available, only the edge E is removed.*/
6841 void
6842 remove_edge_and_dominated_blocks (edge e)
6844 VEC (basic_block, heap) *bbs_to_remove = NULL;
6845 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6846 bitmap df, df_idom;
6847 edge f;
6848 edge_iterator ei;
6849 bool none_removed = false;
6850 unsigned i;
6851 basic_block bb, dbb;
6852 bitmap_iterator bi;
6854 if (!dom_info_available_p (CDI_DOMINATORS))
6856 remove_edge (e);
6857 return;
6860 /* No updating is needed for edges to exit. */
6861 if (e->dest == EXIT_BLOCK_PTR)
6863 if (cfgcleanup_altered_bbs)
6864 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6865 remove_edge (e);
6866 return;
6869 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6870 that is not dominated by E->dest, then this set is empty. Otherwise,
6871 all the basic blocks dominated by E->dest are removed.
6873 Also, to DF_IDOM we store the immediate dominators of the blocks in
6874 the dominance frontier of E (i.e., of the successors of the
6875 removed blocks, if there are any, and of E->dest otherwise). */
6876 FOR_EACH_EDGE (f, ei, e->dest->preds)
6878 if (f == e)
6879 continue;
6881 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6883 none_removed = true;
6884 break;
6888 df = BITMAP_ALLOC (NULL);
6889 df_idom = BITMAP_ALLOC (NULL);
6891 if (none_removed)
6892 bitmap_set_bit (df_idom,
6893 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6894 else
6896 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
6897 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
6899 FOR_EACH_EDGE (f, ei, bb->succs)
6901 if (f->dest != EXIT_BLOCK_PTR)
6902 bitmap_set_bit (df, f->dest->index);
6905 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
6906 bitmap_clear_bit (df, bb->index);
6908 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6910 bb = BASIC_BLOCK (i);
6911 bitmap_set_bit (df_idom,
6912 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6916 if (cfgcleanup_altered_bbs)
6918 /* Record the set of the altered basic blocks. */
6919 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6920 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6923 /* Remove E and the cancelled blocks. */
6924 if (none_removed)
6925 remove_edge (e);
6926 else
6928 /* Walk backwards so as to get a chance to substitute all
6929 released DEFs into debug stmts. See
6930 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
6931 details. */
6932 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
6933 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
6936 /* Update the dominance information. The immediate dominator may change only
6937 for blocks whose immediate dominator belongs to DF_IDOM:
6939 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6940 removal. Let Z the arbitrary block such that idom(Z) = Y and
6941 Z dominates X after the removal. Before removal, there exists a path P
6942 from Y to X that avoids Z. Let F be the last edge on P that is
6943 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6944 dominates W, and because of P, Z does not dominate W), and W belongs to
6945 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6946 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6948 bb = BASIC_BLOCK (i);
6949 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6950 dbb;
6951 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6952 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6955 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
6957 BITMAP_FREE (df);
6958 BITMAP_FREE (df_idom);
6959 VEC_free (basic_block, heap, bbs_to_remove);
6960 VEC_free (basic_block, heap, bbs_to_fix_dom);
6963 /* Purge dead EH edges from basic block BB. */
6965 bool
6966 gimple_purge_dead_eh_edges (basic_block bb)
6968 bool changed = false;
6969 edge e;
6970 edge_iterator ei;
6971 gimple stmt = last_stmt (bb);
6973 if (stmt && stmt_can_throw_internal (stmt))
6974 return false;
6976 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6978 if (e->flags & EDGE_EH)
6980 remove_edge_and_dominated_blocks (e);
6981 changed = true;
6983 else
6984 ei_next (&ei);
6987 return changed;
6990 /* Purge dead EH edges from basic block listed in BLOCKS. */
6992 bool
6993 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
6995 bool changed = false;
6996 unsigned i;
6997 bitmap_iterator bi;
6999 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7001 basic_block bb = BASIC_BLOCK (i);
7003 /* Earlier gimple_purge_dead_eh_edges could have removed
7004 this basic block already. */
7005 gcc_assert (bb || changed);
7006 if (bb != NULL)
7007 changed |= gimple_purge_dead_eh_edges (bb);
7010 return changed;
7013 /* Purge dead abnormal call edges from basic block BB. */
7015 bool
7016 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7018 bool changed = false;
7019 edge e;
7020 edge_iterator ei;
7021 gimple stmt = last_stmt (bb);
7023 if (!cfun->has_nonlocal_label)
7024 return false;
7026 if (stmt && stmt_can_make_abnormal_goto (stmt))
7027 return false;
7029 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7031 if (e->flags & EDGE_ABNORMAL)
7033 remove_edge_and_dominated_blocks (e);
7034 changed = true;
7036 else
7037 ei_next (&ei);
7040 return changed;
7043 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7045 bool
7046 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7048 bool changed = false;
7049 unsigned i;
7050 bitmap_iterator bi;
7052 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7054 basic_block bb = BASIC_BLOCK (i);
7056 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7057 this basic block already. */
7058 gcc_assert (bb || changed);
7059 if (bb != NULL)
7060 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7063 return changed;
7066 /* This function is called whenever a new edge is created or
7067 redirected. */
7069 static void
7070 gimple_execute_on_growing_pred (edge e)
7072 basic_block bb = e->dest;
7074 if (!gimple_seq_empty_p (phi_nodes (bb)))
7075 reserve_phi_args_for_new_edge (bb);
7078 /* This function is called immediately before edge E is removed from
7079 the edge vector E->dest->preds. */
7081 static void
7082 gimple_execute_on_shrinking_pred (edge e)
7084 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7085 remove_phi_args (e);
7088 /*---------------------------------------------------------------------------
7089 Helper functions for Loop versioning
7090 ---------------------------------------------------------------------------*/
7092 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7093 of 'first'. Both of them are dominated by 'new_head' basic block. When
7094 'new_head' was created by 'second's incoming edge it received phi arguments
7095 on the edge by split_edge(). Later, additional edge 'e' was created to
7096 connect 'new_head' and 'first'. Now this routine adds phi args on this
7097 additional edge 'e' that new_head to second edge received as part of edge
7098 splitting. */
7100 static void
7101 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7102 basic_block new_head, edge e)
7104 gimple phi1, phi2;
7105 gimple_stmt_iterator psi1, psi2;
7106 tree def;
7107 edge e2 = find_edge (new_head, second);
7109 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7110 edge, we should always have an edge from NEW_HEAD to SECOND. */
7111 gcc_assert (e2 != NULL);
7113 /* Browse all 'second' basic block phi nodes and add phi args to
7114 edge 'e' for 'first' head. PHI args are always in correct order. */
7116 for (psi2 = gsi_start_phis (second),
7117 psi1 = gsi_start_phis (first);
7118 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7119 gsi_next (&psi2), gsi_next (&psi1))
7121 phi1 = gsi_stmt (psi1);
7122 phi2 = gsi_stmt (psi2);
7123 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7124 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7129 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7130 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7131 the destination of the ELSE part. */
7133 static void
7134 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7135 basic_block second_head ATTRIBUTE_UNUSED,
7136 basic_block cond_bb, void *cond_e)
7138 gimple_stmt_iterator gsi;
7139 gimple new_cond_expr;
7140 tree cond_expr = (tree) cond_e;
7141 edge e0;
7143 /* Build new conditional expr */
7144 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7145 NULL_TREE, NULL_TREE);
7147 /* Add new cond in cond_bb. */
7148 gsi = gsi_last_bb (cond_bb);
7149 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7151 /* Adjust edges appropriately to connect new head with first head
7152 as well as second head. */
7153 e0 = single_succ_edge (cond_bb);
7154 e0->flags &= ~EDGE_FALLTHRU;
7155 e0->flags |= EDGE_FALSE_VALUE;
/* CFG hook table for GIMPLE.  Maps the generic cfghooks.c operations
   onto their GIMPLE-specific implementations in this file; entries
   that are NULL are unsupported for GIMPLE (RTL-only operations).  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb,		/* dump_bb  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forwarder_block  */
  NULL,				/* tidy_fallthru_edge  */
  NULL,				/* force_nonfallthru  */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p  */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p  */
  gimple_flow_call_edges_add,	/* flow_call_edges_add  */
  gimple_execute_on_growing_pred, /* execute_on_growing_pred  */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred  */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees  */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb  */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges  */
  flush_pending_stmts		/* flush_pending_stmts  */
};
/* Split all critical edges (edges whose source has multiple successors
   and whose destination has multiple predecessors).  Returns 0; runs as
   the "crited" pass so later passes (notably PRE) can insert statements
   on edges without triggering further splitting.  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Abnormal edges (e.g. computed-goto, EH) cannot be split.  */
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR)
		   && e->src != ENTRY_BLOCK_PTR
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      /* Only split if the source block ends in a control statement
		 other than a return (returns allow insertion before them).  */
	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
/* Pass descriptor for the critical-edge-splitting pass ("crited").
   Requires a CFG and provides the no-critical-edges property that
   PRE and other edge-inserting passes rely on.  */
struct gimple_opt_pass pass_split_crit_edges =
{
 {
  GIMPLE_PASS,
  "crited",			/* name */
  NULL,				/* gate */
  split_critical_edges,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_SPLIT_EDGES,		/* tv_id */
  PROP_cfg,			/* properties_required */
  PROP_no_crit_edges,		/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_verify_flow		/* todo_flags_finish */
 }
};
7260 /* Build a ternary operation and gimplify it. Emit code before GSI.
7261 Return the gimple_val holding the result. */
7263 tree
7264 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7265 tree type, tree a, tree b, tree c)
7267 tree ret;
7268 location_t loc = gimple_location (gsi_stmt (*gsi));
7270 ret = fold_build3_loc (loc, code, type, a, b, c);
7271 STRIP_NOPS (ret);
7273 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7274 GSI_SAME_STMT);
7277 /* Build a binary operation and gimplify it. Emit code before GSI.
7278 Return the gimple_val holding the result. */
7280 tree
7281 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7282 tree type, tree a, tree b)
7284 tree ret;
7286 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7287 STRIP_NOPS (ret);
7289 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7290 GSI_SAME_STMT);
7293 /* Build a unary operation and gimplify it. Emit code before GSI.
7294 Return the gimple_val holding the result. */
7296 tree
7297 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7298 tree a)
7300 tree ret;
7302 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7303 STRIP_NOPS (ret);
7305 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7306 GSI_SAME_STMT);
/* Emit return warnings: warn when a "noreturn" function can return,
   and, under -Wreturn-type, when control reaches the end of a non-void
   function via a bare "return;".  Returns 0 (pass convention).  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* If we have a path to EXIT, then we do return.  TREE_THIS_VOLATILE
     on a FUNCTION_DECL marks it "noreturn" (grounded by the warning
     text below).  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      /* Prefer the location of an actual return statement feeding EXIT.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      /* Fall back to the closing brace of the function.  */
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  gimple last = last_stmt (e->src);
	  if (gimple_code (last) == GIMPLE_RETURN
	      && gimple_return_retval (last) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Suppress further warnings of this kind for the function.  */
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
7366 /* Given a basic block B which ends with a conditional and has
7367 precisely two successors, determine which of the edges is taken if
7368 the conditional is true and which is taken if the conditional is
7369 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7371 void
7372 extract_true_false_edges_from_block (basic_block b,
7373 edge *true_edge,
7374 edge *false_edge)
7376 edge e = EDGE_SUCC (b, 0);
7378 if (e->flags & EDGE_TRUE_VALUE)
7380 *true_edge = e;
7381 *false_edge = EDGE_SUCC (b, 1);
7383 else
7385 *false_edge = e;
7386 *true_edge = EDGE_SUCC (b, 1);
/* Pass descriptor for the return-warning pass.  The "*" prefix in the
   name keeps it out of -fdump-tree-all output.  Always runs (no gate);
   requires a CFG.  */
struct gimple_opt_pass pass_warn_function_return =
{
 {
  GIMPLE_PASS,
  "*warn_function_return",	/* name */
  NULL,				/* gate */
  execute_warn_function_return,	/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_NONE,			/* tv_id */
  PROP_cfg,			/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  0				/* todo_flags_finish */
 }
};
7409 /* Emit noreturn warnings. */
7411 static unsigned int
7412 execute_warn_function_noreturn (void)
7414 if (!TREE_THIS_VOLATILE (current_function_decl)
7415 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7416 warn_function_noreturn (current_function_decl);
7417 return 0;
7420 static bool
7421 gate_warn_function_noreturn (void)
7423 return warn_suggest_attribute_noreturn;
/* Pass descriptor for the noreturn-suggestion pass.  Gated on
   -Wsuggest-attribute=noreturn; requires a CFG.  */
struct gimple_opt_pass pass_warn_function_noreturn =
{
 {
  GIMPLE_PASS,
  "*warn_function_noreturn",	/* name */
  gate_warn_function_noreturn,	/* gate */
  execute_warn_function_noreturn, /* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_NONE,			/* tv_id */
  PROP_cfg,			/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  0				/* todo_flags_finish */
 }
};
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.

   SEQ is the statement sequence to walk; container statements (binds,
   try blocks, catch handlers, EH filters) are recursed into.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (g));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (g));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  /* A call whose result is captured is fine.  */
	  if (gimple_call_lhs (g))
	    break;
	  /* Internal calls have no user-visible declaration to warn about.  */
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      /* FDECL may be NULL for indirect calls; use the generic
		 wording in that case.  */
	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
7511 static unsigned int
7512 run_warn_unused_result (void)
7514 do_warn_unused_result (gimple_body (current_function_decl));
7515 return 0;
7518 static bool
7519 gate_warn_unused_result (void)
7521 return flag_warn_unused_result;
7524 struct gimple_opt_pass pass_warn_unused_result =
7527 GIMPLE_PASS,
7528 "*warn_unused_result", /* name */
7529 gate_warn_unused_result, /* gate */
7530 run_warn_unused_result, /* execute */
7531 NULL, /* sub */
7532 NULL, /* next */
7533 0, /* static_pass_number */
7534 TV_NONE, /* tv_id */
7535 PROP_gimple_any, /* properties_required */
7536 0, /* properties_provided */
7537 0, /* properties_destroyed */
7538 0, /* todo_flags_start */
7539 0, /* todo_flags_finish */