Split up branch flow simplification a bit in preparation for adding
[smatch.git] / flow.c
blob3e29b6ba9880e2471cf369185e93d47c9a12d443
/*
 * Flow - walk the linearized flowgraph, simplifying it as we
 * go along.
 *
 * Copyright (C) 2004 Linus Torvalds
 */
8 #include <string.h>
9 #include <stdarg.h>
10 #include <stdlib.h>
11 #include <stdio.h>
12 #include <stddef.h>
13 #include <assert.h>
15 #include "parse.h"
16 #include "expression.h"
17 #include "linearize.h"
18 #include "flow.h"
20 unsigned long bb_generation;
/*
 * Dammit, if we have a phi-node followed by a conditional
 * branch on that phi-node, we should damn well be able to
 * do something about the source. Maybe.
 */
27 static int rewrite_branch(struct basic_block *bb,
28 struct basic_block **ptr,
29 struct basic_block *old,
30 struct basic_block *new)
32 if (*ptr != old || new == old)
33 return 0;
35 /* We might find new if-conversions or non-dominating CSEs */
36 repeat_phase |= REPEAT_CSE;
37 *ptr = new;
38 replace_bb_in_list(&bb->children, old, new, 1);
39 remove_bb_from_list(&old->parents, bb, 1);
40 add_bb(&new->parents, bb);
41 return 1;
/*
 * Return the known truth value of a pseudo, or -1 if
 * it's not known.
 */
48 static int pseudo_truth_value(pseudo_t pseudo)
50 switch (pseudo->type) {
51 case PSEUDO_VAL:
52 return !!pseudo->value;
54 case PSEUDO_REG: {
55 struct instruction *insn = pseudo->def;
56 if (insn->opcode == OP_SETVAL && insn->target == pseudo) {
57 struct expression *expr = insn->val;
59 /* A symbol address is always considered true.. */
60 if (!expr)
61 return 1;
62 if (expr->type == EXPR_VALUE)
63 return !!expr->value;
66 /* Fall through */
67 default:
68 return -1;
/*
 * Does a basic block depend on the pseudos that "src" defines?
 */
75 static int bb_depends_on(struct basic_block *target, struct basic_block *src)
77 pseudo_t pseudo;
79 FOR_EACH_PTR(src->defines, pseudo) {
80 if (pseudo_in_list(target->needs, pseudo))
81 return 1;
82 } END_FOR_EACH_PTR(pseudo);
83 return 0;
/*
 * When we reach here, we have:
 *  - a basic block that ends in a conditional branch and
 *    that has no side effects apart from the pseudos it
 *    may change.
 *  - the phi-node that the conditional branch depends on
 *  - full pseudo liveness information
 *
 * We need to check if any of the _sources_ of the phi-node
 * may be constant, and not actually need this block at all.
 */
97 static int try_to_simplify_bb(struct basic_block *bb, struct instruction *first, struct instruction *second)
99 int changed = 0;
100 pseudo_t phi;
102 FOR_EACH_PTR(first->phi_list, phi) {
103 struct instruction *def = phi->def;
104 struct basic_block *source = def->bb, *target;
105 pseudo_t pseudo = def->src1;
106 struct instruction *br;
107 int true;
109 if (!pseudo || !source)
110 continue;
111 br = last_instruction(source->insns);
112 if (!br)
113 continue;
114 if (br->opcode != OP_BR)
115 continue;
116 true = pseudo_truth_value(pseudo);
117 if (true < 0)
118 continue;
119 target = true ? second->bb_true : second->bb_false;
120 if (bb_depends_on(target, bb))
121 continue;
122 changed |= rewrite_branch(source, &br->bb_true, bb, target);
123 changed |= rewrite_branch(source, &br->bb_false, bb, target);
124 } END_FOR_EACH_PTR(phi);
125 return changed;
128 static int bb_has_side_effects(struct basic_block *bb)
130 struct instruction *insn;
131 FOR_EACH_PTR(bb->insns, insn) {
132 switch (insn->opcode) {
133 case OP_CALL:
134 case OP_STORE:
135 case OP_CONTEXT:
136 return 1;
137 default:
138 continue;
140 } END_FOR_EACH_PTR(insn);
141 return 0;
144 static int simplify_phi_branch(struct basic_block *bb, struct instruction *br)
146 pseudo_t cond = br->cond;
147 struct instruction *def;
149 if (!cond || cond->type != PSEUDO_REG)
150 return 0;
151 def = cond->def;
152 if (def->bb != bb || def->opcode != OP_PHI)
153 return 0;
154 if (bb_has_side_effects(bb))
155 return 0;
156 return try_to_simplify_bb(bb, def, br);
159 static int simplify_branch_nodes(struct entrypoint *ep)
161 int changed = 0;
162 struct basic_block *bb;
164 FOR_EACH_PTR(ep->bbs, bb) {
165 struct instruction *br = last_instruction(bb->insns);
167 if (!br || br->opcode != OP_BR || !br->bb_false)
168 continue;
169 changed |= simplify_phi_branch(bb, br);
170 } END_FOR_EACH_PTR(bb);
171 return changed;
/*
 * This is called late - when we have intra-bb liveness information..
 */
177 int simplify_flow(struct entrypoint *ep)
179 return simplify_branch_nodes(ep);
182 static inline void concat_user_list(struct pseudo_ptr_list *src, struct pseudo_ptr_list **dst)
184 concat_ptr_list((struct ptr_list *)src, (struct ptr_list **)dst);
187 void convert_instruction_target(struct instruction *insn, pseudo_t src)
189 pseudo_t target, *usep;
192 * Go through the "insn->users" list and replace them all..
194 target = insn->target;
195 if (target == src)
196 return;
197 FOR_EACH_PTR(target->users, usep) {
198 if (*usep != VOID) {
199 assert(*usep == target);
200 *usep = src;
202 } END_FOR_EACH_PTR(usep);
203 concat_user_list(target->users, &src->users);
204 target->users = NULL;
207 void convert_load_instruction(struct instruction *insn, pseudo_t src)
209 convert_instruction_target(insn, src);
210 /* Turn the load into a no-op */
211 insn->opcode = OP_LNOP;
212 insn->bb = NULL;
215 static int overlapping_memop(struct instruction *a, struct instruction *b)
217 unsigned int a_start = a->offset << 3;
218 unsigned int b_start = b->offset << 3;
219 unsigned int a_size = a->size;
220 unsigned int b_size = b->size;
222 if (a_size + a_start <= b_start)
223 return 0;
224 if (b_size + b_start <= a_start)
225 return 0;
226 return 1;
229 static inline int same_memop(struct instruction *a, struct instruction *b)
231 return a->offset == b->offset && a->size == b->size;
/*
 * Return 1 if "dom" dominates the access to 'pseudo'
 * in insn.
 *
 * Return 0 if it doesn't, and -1 if you don't know.
 */
240 int dominates(pseudo_t pseudo, struct instruction *insn, struct instruction *dom, int local)
242 int opcode = dom->opcode;
244 if (opcode == OP_CALL)
245 return local ? 0 : -1;
246 if (opcode != OP_LOAD && opcode != OP_STORE)
247 return 0;
248 if (dom->src != pseudo) {
249 if (local)
250 return 0;
251 /* We don't think two explicitly different symbols ever alias */
252 if (dom->src->type == PSEUDO_SYM)
253 return 0;
254 /* We could try to do some alias analysis here */
255 return -1;
257 if (!same_memop(insn, dom)) {
258 if (dom->opcode == OP_LOAD)
259 return 0;
260 if (!overlapping_memop(insn, dom))
261 return 0;
262 return -1;
264 return 1;
267 static int find_dominating_parents(pseudo_t pseudo, struct instruction *insn,
268 struct basic_block *bb, unsigned long generation, struct pseudo_list **dominators,
269 int local, int loads)
271 struct basic_block *parent;
273 if (!bb->parents)
274 return !!local;
276 if (bb_list_size(bb->parents) > 1)
277 loads = 0;
278 FOR_EACH_PTR(bb->parents, parent) {
279 struct instruction *one;
280 struct instruction *br;
281 pseudo_t phi;
283 FOR_EACH_PTR_REVERSE(parent->insns, one) {
284 int dominance;
285 if (one == insn)
286 goto no_dominance;
287 dominance = dominates(pseudo, insn, one, local);
288 if (dominance < 0) {
289 if (one->opcode == OP_LOAD)
290 continue;
291 return 0;
293 if (!dominance)
294 continue;
295 if (one->opcode == OP_LOAD && !loads)
296 continue;
297 goto found_dominator;
298 } END_FOR_EACH_PTR_REVERSE(one);
299 no_dominance:
300 if (parent->generation == generation)
301 continue;
302 parent->generation = generation;
304 if (!find_dominating_parents(pseudo, insn, parent, generation, dominators, local, loads))
305 return 0;
306 continue;
308 found_dominator:
309 br = delete_last_instruction(&parent->insns);
310 phi = alloc_phi(parent, one->target, one->size);
311 phi->ident = phi->ident ? : pseudo->ident;
312 add_instruction(&parent->insns, br);
313 use_pseudo(phi, add_pseudo(dominators, phi));
314 } END_FOR_EACH_PTR(parent);
315 return 1;
/*
 * We should probably sort the phi list just to make it easier to compare
 * later for equality.
 */
322 void rewrite_load_instruction(struct instruction *insn, struct pseudo_list *dominators)
324 pseudo_t new, phi;
327 * Check for somewhat common case of duplicate
328 * phi nodes.
330 new = first_pseudo(dominators)->def->src1;
331 FOR_EACH_PTR(dominators, phi) {
332 if (new != phi->def->src1)
333 goto complex_phi;
334 new->ident = new->ident ? : phi->ident;
335 } END_FOR_EACH_PTR(phi);
338 * All the same pseudo - mark the phi-nodes unused
339 * and convert the load into a LNOP and replace the
340 * pseudo.
342 FOR_EACH_PTR(dominators, phi) {
343 phi->def->bb = NULL;
344 } END_FOR_EACH_PTR(phi);
345 convert_load_instruction(insn, new);
346 return;
348 complex_phi:
349 /* We leave symbol pseudos with a bogus usage list here */
350 if (insn->src->type != PSEUDO_SYM)
351 kill_use(&insn->src);
352 insn->opcode = OP_PHI;
353 insn->phi_list = dominators;
356 static int find_dominating_stores(pseudo_t pseudo, struct instruction *insn,
357 unsigned long generation, int local)
359 struct basic_block *bb = insn->bb;
360 struct instruction *one, *dom = NULL;
361 struct pseudo_list *dominators;
362 int partial;
364 /* Unreachable load? Undo it */
365 if (!bb) {
366 insn->opcode = OP_LNOP;
367 return 1;
370 partial = 0;
371 FOR_EACH_PTR(bb->insns, one) {
372 int dominance;
373 if (one == insn)
374 goto found;
375 dominance = dominates(pseudo, insn, one, local);
376 if (dominance < 0) {
377 /* Ignore partial load dominators */
378 if (one->opcode == OP_LOAD)
379 continue;
380 dom = NULL;
381 partial = 1;
382 continue;
384 if (!dominance)
385 continue;
386 dom = one;
387 partial = 0;
388 } END_FOR_EACH_PTR(one);
389 /* Whaa? */
390 warning(pseudo->sym->pos, "unable to find symbol read");
391 return 0;
392 found:
393 if (partial)
394 return 0;
396 if (dom) {
397 convert_load_instruction(insn, dom->target);
398 return 1;
401 /* Ok, go find the parents */
402 bb->generation = generation;
404 dominators = NULL;
405 if (!find_dominating_parents(pseudo, insn, bb, generation, &dominators, local, 1))
406 return 0;
408 /* This happens with initial assignments to structures etc.. */
409 if (!dominators) {
410 if (!local)
411 return 0;
412 convert_load_instruction(insn, value_pseudo(0));
413 return 1;
417 * If we find just one dominating instruction, we
418 * can turn it into a direct thing. Otherwise we'll
419 * have to turn the load into a phi-node of the
420 * dominators.
422 rewrite_load_instruction(insn, dominators);
423 return 1;
426 static void kill_store(struct instruction *insn)
428 if (insn) {
429 insn->bb = NULL;
430 insn->opcode = OP_SNOP;
431 kill_use(&insn->target);
435 /* Kill a pseudo that is dead on exit from the bb */
436 static void kill_dead_stores(pseudo_t pseudo, unsigned long generation, struct basic_block *bb, int local)
438 struct instruction *insn;
439 struct basic_block *parent;
441 if (bb->generation == generation)
442 return;
443 bb->generation = generation;
444 FOR_EACH_PTR_REVERSE(bb->insns, insn) {
445 int opcode = insn->opcode;
447 if (opcode != OP_LOAD && opcode != OP_STORE) {
448 if (local)
449 continue;
450 if (opcode == OP_CALL)
451 return;
452 continue;
454 if (insn->src == pseudo) {
455 if (opcode == OP_LOAD)
456 return;
457 kill_store(insn);
458 continue;
460 if (local)
461 continue;
462 if (insn->src->type != PSEUDO_SYM)
463 return;
464 } END_FOR_EACH_PTR_REVERSE(insn);
466 FOR_EACH_PTR(bb->parents, parent) {
467 struct basic_block *child;
468 FOR_EACH_PTR(parent->children, child) {
469 if (child && child != bb)
470 return;
471 } END_FOR_EACH_PTR(child);
472 kill_dead_stores(pseudo, generation, parent, local);
473 } END_FOR_EACH_PTR(parent);
/*
 * This should see if the "insn" trivially dominates some previous store, and kill the
 * store if unnecessary.
 */
480 static void kill_dominated_stores(pseudo_t pseudo, struct instruction *insn,
481 unsigned long generation, struct basic_block *bb, int local, int found)
483 struct instruction *one;
484 struct basic_block *parent;
486 /* Unreachable store? Undo it */
487 if (!bb) {
488 kill_store(insn);
489 return;
491 if (bb->generation == generation)
492 return;
493 bb->generation = generation;
494 FOR_EACH_PTR_REVERSE(bb->insns, one) {
495 int dominance;
496 if (!found) {
497 if (one != insn)
498 continue;
499 found = 1;
500 continue;
502 dominance = dominates(pseudo, insn, one, local);
503 if (!dominance)
504 continue;
505 if (dominance < 0)
506 return;
507 if (one->opcode == OP_LOAD)
508 return;
509 kill_store(one);
510 } END_FOR_EACH_PTR_REVERSE(one);
512 if (!found) {
513 warning(bb->pos, "Unable to find instruction");
514 return;
517 FOR_EACH_PTR(bb->parents, parent) {
518 struct basic_block *child;
519 FOR_EACH_PTR(parent->children, child) {
520 if (child && child != bb)
521 return;
522 } END_FOR_EACH_PTR(child);
523 kill_dominated_stores(pseudo, insn, generation, parent, local, found);
524 } END_FOR_EACH_PTR(parent);
527 static void simplify_one_symbol(struct entrypoint *ep, struct symbol *sym)
529 pseudo_t pseudo, src, *pp;
530 struct instruction *def;
531 unsigned long mod;
532 int all, stores, complex;
534 /* Never used as a symbol? */
535 pseudo = sym->pseudo;
536 if (!pseudo)
537 return;
539 /* We don't do coverage analysis of volatiles.. */
540 if (sym->ctype.modifiers & MOD_VOLATILE)
541 return;
543 /* ..and symbols with external visibility need more care */
544 mod = sym->ctype.modifiers & (MOD_NONLOCAL | MOD_STATIC | MOD_ADDRESSABLE);
545 if (mod)
546 goto external_visibility;
548 def = NULL;
549 stores = 0;
550 complex = 0;
551 FOR_EACH_PTR(pseudo->users, pp) {
552 /* We know that the symbol-pseudo use is the "src" in the instruction */
553 struct instruction *insn = container(pp, struct instruction, src);
555 switch (insn->opcode) {
556 case OP_STORE:
557 stores++;
558 def = insn;
559 break;
560 case OP_LOAD:
561 break;
562 case OP_SETVAL:
563 if (!insn->bb)
564 continue;
565 mod |= MOD_ADDRESSABLE;
566 goto external_visibility;
567 case OP_NOP:
568 case OP_SNOP:
569 case OP_LNOP:
570 case OP_PHI:
571 continue;
572 default:
573 warning(sym->pos, "symbol '%s' pseudo used in unexpected way", show_ident(sym->ident));
575 complex |= insn->offset;
576 } END_FOR_EACH_PTR(pp);
578 if (complex)
579 goto complex_def;
580 if (stores > 1)
581 goto multi_def;
584 * Goodie, we have a single store (if even that) in the whole
585 * thing. Replace all loads with moves from the pseudo,
586 * replace the store with a def.
588 src = VOID;
589 if (def)
590 src = def->target;
592 FOR_EACH_PTR(pseudo->users, pp) {
593 struct instruction *insn = container(pp, struct instruction, src);
594 if (insn->opcode == OP_LOAD)
595 convert_load_instruction(insn, src);
596 } END_FOR_EACH_PTR(pp);
598 /* Turn the store into a no-op */
599 kill_store(def);
600 return;
602 multi_def:
603 complex_def:
604 external_visibility:
605 all = 1;
606 FOR_EACH_PTR_REVERSE(pseudo->users, pp) {
607 struct instruction *insn = container(pp, struct instruction, src);
608 if (insn->opcode == OP_LOAD)
609 all &= find_dominating_stores(pseudo, insn, ++bb_generation, !mod);
610 } END_FOR_EACH_PTR_REVERSE(pp);
612 /* If we converted all the loads, remove the stores. They are dead */
613 if (all && !mod) {
614 FOR_EACH_PTR(pseudo->users, pp) {
615 struct instruction *insn = container(pp, struct instruction, src);
616 if (insn->opcode == OP_STORE)
617 kill_store(insn);
618 } END_FOR_EACH_PTR(pp);
619 } else {
621 * If we couldn't take the shortcut, see if we can at least kill some
622 * of them..
624 FOR_EACH_PTR(pseudo->users, pp) {
625 struct instruction *insn = container(pp, struct instruction, src);
626 if (insn->opcode == OP_STORE)
627 kill_dominated_stores(pseudo, insn, ++bb_generation, insn->bb, !mod, 0);
628 } END_FOR_EACH_PTR(pp);
630 if (!(mod & (MOD_NONLOCAL | MOD_STATIC))) {
631 struct basic_block *bb;
632 FOR_EACH_PTR(ep->bbs, bb) {
633 if (!bb->children)
634 kill_dead_stores(pseudo, ++bb_generation, bb, !mod);
635 } END_FOR_EACH_PTR(bb);
639 return;
642 void simplify_symbol_usage(struct entrypoint *ep)
644 struct symbol *sym;
646 FOR_EACH_PTR(ep->accesses, sym) {
647 simplify_one_symbol(ep, sym);
648 } END_FOR_EACH_PTR(sym);
651 static void mark_bb_reachable(struct basic_block *bb, unsigned long generation)
653 struct basic_block *child;
655 if (bb->generation == generation)
656 return;
657 bb->generation = generation;
658 FOR_EACH_PTR(bb->children, child) {
659 mark_bb_reachable(child, generation);
660 } END_FOR_EACH_PTR(child);
663 static void kill_defs(struct instruction *insn)
665 pseudo_t target = insn->target;
667 if (!has_use_list(target))
668 return;
669 if (target->def != insn)
670 return;
672 convert_instruction_target(insn, VOID);
675 void kill_bb(struct basic_block *bb)
677 struct instruction *insn;
678 struct basic_block *child, *parent;
680 FOR_EACH_PTR(bb->insns, insn) {
681 kill_instruction(insn);
682 kill_defs(insn);
684 * We kill unreachable instructions even if they
685 * otherwise aren't "killable". Eg volatile loads
686 * etc.
688 insn->bb = NULL;
689 } END_FOR_EACH_PTR(insn);
690 bb->insns = NULL;
692 FOR_EACH_PTR(bb->children, child) {
693 remove_bb_from_list(&child->parents, bb, 0);
694 } END_FOR_EACH_PTR(child);
695 bb->children = NULL;
697 FOR_EACH_PTR(bb->parents, parent) {
698 remove_bb_from_list(&parent->children, bb, 0);
699 } END_FOR_EACH_PTR(parent);
700 bb->parents = NULL;
703 void kill_unreachable_bbs(struct entrypoint *ep)
705 struct basic_block *bb;
706 unsigned long generation = ++bb_generation;
708 mark_bb_reachable(ep->entry, generation);
709 FOR_EACH_PTR(ep->bbs, bb) {
710 if (bb->generation == generation)
711 continue;
712 /* Mark it as being dead */
713 kill_bb(bb);
714 } END_FOR_EACH_PTR(bb);
717 static int rewrite_parent_branch(struct basic_block *bb, struct basic_block *old, struct basic_block *new)
719 struct instruction *insn = last_instruction(bb->insns);
721 if (!insn)
722 return 0;
724 /* Infinite loops: let's not "optimize" them.. */
725 if (old == new)
726 return 0;
728 switch (insn->opcode) {
729 case OP_BR:
730 rewrite_branch(bb, &insn->bb_true, old, new);
731 rewrite_branch(bb, &insn->bb_false, old, new);
733 /* Conditional branch to same target? */
734 if (insn->bb_true == insn->bb_false) {
735 remove_bb_from_list(&new->parents, bb, 1);
736 remove_bb_from_list(&bb->children, new, 1);
737 insn->bb_false = NULL;
738 kill_use(&insn->cond);
740 return 1;
741 case OP_SWITCH: {
742 struct multijmp *jmp;
743 FOR_EACH_PTR(insn->multijmp_list, jmp) {
744 rewrite_branch(bb, &jmp->target, old, new);
745 } END_FOR_EACH_PTR(jmp);
746 return 1;
748 default:
749 return 0;
753 static struct basic_block * rewrite_branch_bb(struct basic_block *bb, struct instruction *br)
755 struct basic_block *parent;
756 struct basic_block *target = br->bb_true;
757 struct basic_block *false = br->bb_false;
759 if (target && false) {
760 pseudo_t cond = br->cond;
761 if (cond->type != PSEUDO_VAL)
762 return NULL;
763 target = cond->value ? target : false;
767 * We can't do FOR_EACH_PTR() here, because the parent list
768 * may change when we rewrite the parent.
770 while ((parent = first_basic_block(bb->parents)) != NULL) {
771 if (!rewrite_parent_branch(parent, bb, target))
772 return NULL;
774 return target;
777 static void vrfy_bb_in_list(struct basic_block *bb, struct basic_block_list *list)
779 if (bb) {
780 struct basic_block *tmp;
781 int no_bb_in_list = 0;
783 FOR_EACH_PTR(list, tmp) {
784 if (bb == tmp)
785 return;
786 } END_FOR_EACH_PTR(tmp);
787 assert(no_bb_in_list);
791 static void vrfy_parents(struct basic_block *bb)
793 struct basic_block *tmp;
794 FOR_EACH_PTR(bb->parents, tmp) {
795 vrfy_bb_in_list(bb, tmp->children);
796 } END_FOR_EACH_PTR(tmp);
799 static void vrfy_children(struct basic_block *bb)
801 struct basic_block *tmp;
802 struct instruction *br = last_instruction(bb->insns);
804 if (!br) {
805 assert(!bb->children);
806 return;
808 switch (br->opcode) {
809 struct multijmp *jmp;
810 case OP_BR:
811 vrfy_bb_in_list(br->bb_true, bb->children);
812 vrfy_bb_in_list(br->bb_false, bb->children);
813 break;
814 case OP_SWITCH:
815 case OP_COMPUTEDGOTO:
816 FOR_EACH_PTR(br->multijmp_list, jmp) {
817 vrfy_bb_in_list(jmp->target, bb->children);
818 } END_FOR_EACH_PTR(jmp);
819 break;
820 default:
821 break;
824 FOR_EACH_PTR(bb->children, tmp) {
825 vrfy_bb_in_list(bb, tmp->parents);
826 } END_FOR_EACH_PTR(tmp);
829 static void vrfy_bb_flow(struct basic_block *bb)
831 vrfy_children(bb);
832 vrfy_parents(bb);
835 void vrfy_flow(struct entrypoint *ep)
837 struct basic_block *bb;
838 struct basic_block *entry = ep->entry;
840 FOR_EACH_PTR(ep->bbs, bb) {
841 if (bb == entry)
842 entry = NULL;
843 vrfy_bb_flow(bb);
844 } END_FOR_EACH_PTR(bb);
845 assert(!entry);
848 void pack_basic_blocks(struct entrypoint *ep)
850 struct basic_block *bb;
852 /* See if we can merge a bb into another one.. */
853 FOR_EACH_PTR(ep->bbs, bb) {
854 struct instruction *first, *insn;
855 struct basic_block *parent, *child, *last;
857 if (!bb_reachable(bb))
858 continue;
861 * Just a branch?
863 FOR_EACH_PTR(bb->insns, first) {
864 if (!first->bb)
865 continue;
866 switch (first->opcode) {
867 case OP_NOP: case OP_LNOP: case OP_SNOP:
868 continue;
869 case OP_BR: {
870 struct basic_block *replace;
871 replace = rewrite_branch_bb(bb, first);
872 if (replace) {
873 if (bb == ep->entry)
874 ep->entry = replace;
875 kill_bb(bb);
876 goto no_merge;
879 /* fallthrough */
880 default:
881 goto out;
883 } END_FOR_EACH_PTR(first);
885 out:
886 if (ep->entry == bb)
887 continue;
890 * See if we only have one parent..
892 last = NULL;
893 FOR_EACH_PTR(bb->parents, parent) {
894 if (last) {
895 if (last != parent)
896 goto no_merge;
897 continue;
899 last = parent;
900 } END_FOR_EACH_PTR(parent);
902 parent = last;
903 if (!parent || parent == bb)
904 continue;
907 * Goodie. See if the parent can merge..
909 FOR_EACH_PTR(parent->children, child) {
910 if (child != bb)
911 goto no_merge;
912 } END_FOR_EACH_PTR(child);
915 * Merge the two.
917 repeat_phase |= REPEAT_CSE;
920 * But don't allow phi-source merges after this.
921 * FIXME, FIXME! I really need to think about this.
922 * Is it true? I think it's ok to merge phi-sources,
923 * as long as we keep their relative position in
924 * the stream. It's the re-ordering we can't have.
925 * I think.
927 merge_phi_sources = 0;
929 parent->children = bb->children;
930 bb->children = NULL;
931 bb->parents = NULL;
933 FOR_EACH_PTR(parent->children, child) {
934 replace_bb_in_list(&child->parents, bb, parent, 0);
935 } END_FOR_EACH_PTR(child);
937 delete_last_instruction(&parent->insns);
938 FOR_EACH_PTR(bb->insns, insn) {
939 if (insn->bb) {
940 assert(insn->bb == bb);
941 insn->bb = parent;
943 add_instruction(&parent->insns, insn);
944 } END_FOR_EACH_PTR(insn);
945 bb->insns = NULL;
947 no_merge:
948 /* nothing to do */;
949 } END_FOR_EACH_PTR(bb);