Start tracking cross-basic-block pseudo usage.
[smatch.git] / flow.c
blobd67322a27b2b56db0697d57a14acfb5e9732ba5f
/*
 * Flow - walk the linearized flowgraph, simplifying it as we
 * go along.
 *
 * Copyright (C) 2004 Linus Torvalds
 */
8 #include <string.h>
9 #include <stdarg.h>
10 #include <stdlib.h>
11 #include <stdio.h>
12 #include <stddef.h>
13 #include <assert.h>
15 #include "parse.h"
16 #include "expression.h"
17 #include "linearize.h"
18 #include "flow.h"
/* Monotonic counter used to stamp basic blocks (bb->generation) during
 * graph walks; incremented once per traversal so stale marks from earlier
 * walks are never mistaken for current ones. */
unsigned long bb_generation;
23 * Dammit, if we have a phi-node followed by a conditional
24 * branch on that phi-node, we should damn well be able to
25 * do something about the source. Maybe.
27 static void rewrite_branch(struct basic_block *bb,
28 struct basic_block **ptr,
29 struct basic_block *old,
30 struct basic_block *new)
32 if (*ptr != old || new == old)
33 return;
35 /* We might find new if-conversions or non-dominating CSEs */
36 repeat_phase |= REPEAT_CSE;
37 *ptr = new;
38 replace_bb_in_list(&bb->children, old, new, 1);
39 remove_bb_from_list(&old->parents, bb, 1);
40 add_bb(&new->parents, bb);
/*
 * Return the known truth value of a pseudo, or -1 if
 * it's not known.
 */
static int pseudo_truth_value(pseudo_t pseudo)
{
	switch (pseudo->type) {
	case PSEUDO_VAL:
		/* Constant: truth value is simply non-zero-ness */
		return !!pseudo->value;

	case PSEUDO_REG: {
		struct instruction *insn = pseudo->def;
		/* A register is only known if it was set by OP_SETVAL */
		if (insn->opcode == OP_SETVAL && insn->target == pseudo) {
			struct expression *expr = insn->val;

			/* A symbol address is always considered true.. */
			if (!expr)
				return 1;
			if (expr->type == EXPR_VALUE)
				return !!expr->value;
		}
	}
		/* Fall through: unknown register value */
	default:
		return -1;
	}
}
72 static void try_to_simplify_bb(struct entrypoint *ep, struct basic_block *bb,
73 struct instruction *first, struct instruction *second)
75 pseudo_t phi;
77 FOR_EACH_PTR(first->phi_list, phi) {
78 struct instruction *def = phi->def;
79 struct basic_block *source = def->bb, *target;
80 pseudo_t pseudo = def->src1;
81 struct instruction *br;
82 int true;
84 if (!pseudo || !source)
85 continue;
86 br = last_instruction(source->insns);
87 if (!br)
88 continue;
89 if (br->opcode != OP_BR)
90 continue;
92 true = pseudo_truth_value(pseudo);
93 if (true < 0)
94 continue;
95 target = true ? second->bb_true : second->bb_false;
96 rewrite_branch(source, &br->bb_true, bb, target);
97 rewrite_branch(source, &br->bb_false, bb, target);
98 } END_FOR_EACH_PTR(phi);
/* Copy up to 'nr' instruction pointers from 'list' into the array 'arr'.
 * Returns the number actually copied.  Typed convenience wrapper around
 * the generic linearize_ptr_list(). */
static inline int linearize_insn_list(struct instruction_list *list, struct instruction **arr, int nr)
{
	return linearize_ptr_list((struct ptr_list *)list, (void **)arr, nr);
}
106 static void simplify_phi_nodes(struct entrypoint *ep)
108 struct basic_block *bb;
110 FOR_EACH_PTR(ep->bbs, bb) {
111 struct instruction *insns[2], *first, *second;
113 if (linearize_insn_list(bb->insns, insns, 2) < 2)
114 continue;
116 first = insns[0];
117 second = insns[1];
119 if (first->opcode != OP_PHI)
120 continue;
121 if (second->opcode != OP_BR)
122 continue;
123 if (first->target != second->cond)
124 continue;
125 try_to_simplify_bb(ep, bb, first, second);
126 } END_FOR_EACH_PTR(bb);
/* Append the pseudo-use list 'src' onto '*dst'.  Typed wrapper around
 * the generic concat_ptr_list(). */
static inline void concat_user_list(struct pseudo_ptr_list *src, struct pseudo_ptr_list **dst)
{
	concat_ptr_list((struct ptr_list *)src, (struct ptr_list **)dst);
}
/*
 * Replace every use of insn's target pseudo with 'src', and hand the
 * target's user list over to 'src'.  After this the old target has no
 * users left.
 */
void convert_instruction_target(struct instruction *insn, pseudo_t src)
{
	pseudo_t target, *usep;

	/*
	 * Go through the "insn->users" list and replace them all..
	 */
	target = insn->target;
	if (target == src)
		return;
	FOR_EACH_PTR(target->users, usep) {
		/* VOID entries are dead uses - leave them alone */
		if (*usep != VOID) {
			assert(*usep == target);
			*usep = src;
		}
	} END_FOR_EACH_PTR(usep);
	/* Move the (now rewritten) use records over to src's user list */
	concat_user_list(target->users, &src->users);
	target->users = NULL;
}
154 void convert_load_instruction(struct instruction *insn, pseudo_t src)
156 convert_instruction_target(insn, src);
157 /* Turn the load into a no-op */
158 insn->opcode = OP_LNOP;
159 insn->bb = NULL;
162 static int overlapping_memop(struct instruction *a, struct instruction *b)
164 unsigned int a_start = a->offset << 3;
165 unsigned int b_start = b->offset << 3;
166 unsigned int a_size = a->size;
167 unsigned int b_size = b->size;
169 if (a_size + a_start <= b_start)
170 return 0;
171 if (b_size + b_start <= a_start)
172 return 0;
173 return 1;
176 static inline int same_memop(struct instruction *a, struct instruction *b)
178 return a->offset == b->offset && a->size == b->size;
/*
 * Return 1 if "dom" dominates the access to 'pseudo'
 * in insn.
 *
 * Return 0 if it doesn't, and -1 if you don't know.
 */
int dominates(pseudo_t pseudo, struct instruction *insn, struct instruction *dom, int local)
{
	int opcode = dom->opcode;

	/* A call can clobber non-local memory; strictly local storage
	 * is untouched by it. */
	if (opcode == OP_CALL)
		return local ? 0 : -1;
	/* Only loads and stores can dominate a memory access */
	if (opcode != OP_LOAD && opcode != OP_STORE)
		return 0;
	if (dom->src != pseudo) {
		if (local)
			return 0;
		/* We don't think two explicitly different symbols ever alias */
		if (dom->src->type == PSEUDO_SYM)
			return 0;
		/* We could try to do some alias analysis here */
		return -1;
	}
	if (!same_memop(insn, dom)) {
		/* A load of a different part tells us nothing about our bytes */
		if (dom->opcode == OP_LOAD)
			return 0;
		if (!overlapping_memop(insn, dom))
			return 0;
		/* Partially overlapping store: neither a clean hit nor a miss */
		return -1;
	}
	return 1;
}
/*
 * Walk up through the parents of 'bb' looking for accesses that
 * dominate the load 'insn'.  Each dominating store (or load, when
 * 'loads' is allowed) contributes a phi-source appended to
 * '*dominators'.  Returns 0 when domination can't be established
 * (e.g. a possibly-aliasing store is found), 1 on success.
 * 'generation' guards against revisiting blocks in loops.
 */
static int find_dominating_parents(pseudo_t pseudo, struct instruction *insn,
	struct basic_block *bb, unsigned long generation, struct pseudo_list **dominators,
	int local, int loads)
{
	struct basic_block *parent;

	/* Reached the entry block: only OK for strictly local storage
	 * (the value is then simply uninitialized). */
	if (!bb->parents)
		return !!local;

	/* With multiple predecessors we can no longer reuse a prior
	 * load as the dominating value. */
	if (bb_list_size(bb->parents) > 1)
		loads = 0;
	FOR_EACH_PTR(bb->parents, parent) {
		struct instruction *one;
		struct instruction *br;
		pseudo_t phi;

		/* Scan the parent backwards for the closest dominator */
		FOR_EACH_PTR_REVERSE(parent->insns, one) {
			int dominance;
			if (one == insn)
				goto no_dominance;
			dominance = dominates(pseudo, insn, one, local);
			if (dominance < 0) {
				/* Ambiguous loads are harmless; anything else kills the search */
				if (one->opcode == OP_LOAD)
					continue;
				return 0;
			}
			if (!dominance)
				continue;
			if (one->opcode == OP_LOAD && !loads)
				continue;
			goto found_dominator;
		} END_FOR_EACH_PTR_REVERSE(one);
no_dominance:
		/* Nothing in this parent - recurse further up (once per generation) */
		if (parent->generation == generation)
			continue;
		parent->generation = generation;

		if (!find_dominating_parents(pseudo, insn, parent, generation, dominators, local, loads))
			return 0;
		continue;

found_dominator:
		/* Insert a phi-source just before the parent's terminator */
		br = delete_last_instruction(&parent->insns);
		phi = alloc_phi(parent, one->target, one->size);
		add_instruction(&parent->insns, br);
		use_pseudo(phi, add_pseudo(dominators, phi));
	} END_FOR_EACH_PTR(parent);
	return 1;
}
/*
 * We should probably sort the phi list just to make it easier to compare
 * later for equality.
 */
void rewrite_load_instruction(struct instruction *insn, struct pseudo_list *dominators)
{
	pseudo_t new, phi;

	/*
	 * Check for somewhat common case of duplicate
	 * phi nodes.
	 */
	new = first_pseudo(dominators)->def->src1;
	FOR_EACH_PTR(dominators, phi) {
		if (new != phi->def->src1)
			goto complex_phi;
	} END_FOR_EACH_PTR(phi);

	/*
	 * All the same pseudo - mark the phi-nodes unused
	 * and convert the load into a LNOP and replace the
	 * pseudo.
	 */
	FOR_EACH_PTR(dominators, phi) {
		phi->def->bb = NULL;
	} END_FOR_EACH_PTR(phi);
	convert_load_instruction(insn, new);
	return;

complex_phi:
	/* Distinct sources: turn the load itself into a phi node. */
	/* We leave symbol pseudos with a bogus usage list here */
	if (insn->src->type != PSEUDO_SYM)
		kill_use(&insn->src);
	insn->opcode = OP_PHI;
	insn->phi_list = dominators;
}
/*
 * Try to replace the load 'insn' with the value of a dominating store
 * (or a phi of several).  Returns 1 when the load was converted or is
 * provably dead, 0 when it has to stay as a real load.
 */
static int find_dominating_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, int local)
{
	struct basic_block *bb = insn->bb;
	struct instruction *one, *dom = NULL;
	struct pseudo_list *dominators;
	int partial;

	/* Unreachable load? Undo it */
	if (!bb) {
		insn->opcode = OP_LNOP;
		return 1;
	}

	/* First scan this block itself, up to the load */
	partial = 0;
	FOR_EACH_PTR(bb->insns, one) {
		int dominance;
		if (one == insn)
			goto found;
		dominance = dominates(pseudo, insn, one, local);
		if (dominance < 0) {
			/* Ignore partial load dominators */
			if (one->opcode == OP_LOAD)
				continue;
			/* Partial store: forget any earlier full dominator */
			dom = NULL;
			partial = 1;
			continue;
		}
		if (!dominance)
			continue;
		dom = one;
		partial = 0;
	} END_FOR_EACH_PTR(one);
	/* Whaa? The load is supposed to be in this block */
	warning(pseudo->sym->pos, "unable to find symbol read");
	return 0;

found:
	if (partial)
		return 0;

	/* In-block dominator: forward its value directly */
	if (dom) {
		convert_load_instruction(insn, dom->target);
		return 1;
	}

	/* Ok, go find the parents */
	bb->generation = generation;

	dominators = NULL;
	if (!find_dominating_parents(pseudo, insn, bb, generation, &dominators, local, 1))
		return 0;

	/* This happens with initial assignments to structures etc.. */
	if (!dominators) {
		if (!local)
			return 0;
		/* Uninitialized local read: treat as zero */
		convert_load_instruction(insn, value_pseudo(0));
		return 1;
	}

	/*
	 * If we find just one dominating instruction, we
	 * can turn it into a direct thing. Otherwise we'll
	 * have to turn the load into a phi-node of the
	 * dominators.
	 */
	rewrite_load_instruction(insn, dominators);
	return 1;
}
371 static void kill_store(struct instruction *insn)
373 if (insn) {
374 insn->bb = NULL;
375 insn->opcode = OP_SNOP;
376 kill_use(&insn->target);
/* Kill a pseudo that is dead on exit from the bb */
static void kill_dead_stores(pseudo_t pseudo, unsigned long generation, struct basic_block *bb, int local)
{
	struct instruction *insn;
	struct basic_block *parent;

	if (bb->generation == generation)
		return;
	bb->generation = generation;
	/* Walk backwards from the block exit */
	FOR_EACH_PTR_REVERSE(bb->insns, insn) {
		int opcode = insn->opcode;

		if (opcode != OP_LOAD && opcode != OP_STORE) {
			if (local)
				continue;
			/* A call may observe non-local memory - stop here */
			if (opcode == OP_CALL)
				return;
			continue;
		}
		if (insn->src == pseudo) {
			/* A read makes everything before it live again */
			if (opcode == OP_LOAD)
				return;
			/* A store with no later read is dead */
			kill_store(insn);
			continue;
		}
		if (local)
			continue;
		/* Unknown (non-symbol) access might alias - be conservative */
		if (insn->src->type != PSEUDO_SYM)
			return;
	} END_FOR_EACH_PTR_REVERSE(insn);

	/* Propagate into parents that flow only into this block */
	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dead_stores(pseudo, generation, parent, local);
	} END_FOR_EACH_PTR(parent);
}
/*
 * This should see if the "insn" trivially dominates some previous store, and kill the
 * store if unnecessary.
 */
static void kill_dominated_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, struct basic_block *bb, int local, int found)
{
	struct instruction *one;
	struct basic_block *parent;

	/* Unreachable store? Undo it */
	if (!bb) {
		kill_store(insn);
		return;
	}
	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR_REVERSE(bb->insns, one) {
		int dominance;
		/* First locate 'insn' itself; only what precedes it matters */
		if (!found) {
			if (one != insn)
				continue;
			found = 1;
			continue;
		}
		dominance = dominates(pseudo, insn, one, local);
		if (!dominance)
			continue;
		/* Ambiguous aliasing or an intervening read: stop */
		if (dominance < 0)
			return;
		if (one->opcode == OP_LOAD)
			return;
		/* Fully shadowed earlier store - dead */
		kill_store(one);
	} END_FOR_EACH_PTR_REVERSE(one);

	if (!found) {
		warning(bb->pos, "Unable to find instruction");
		return;
	}

	/* Continue into parents whose only live child is this block */
	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dominated_stores(pseudo, insn, generation, parent, local, found);
	} END_FOR_EACH_PTR(parent);
}
/*
 * Simplify all loads/stores of one symbol.  A symbol with a single
 * store (and no external visibility) has its loads replaced by the
 * stored value directly; otherwise we fall back to dominator-based
 * load rewriting and dead-store elimination.
 */
static void simplify_one_symbol(struct entrypoint *ep, struct symbol *sym)
{
	pseudo_t pseudo, src, *pp;
	struct instruction *def;
	unsigned long mod;
	int all, stores, complex;

	/* Never used as a symbol? */
	pseudo = sym->pseudo;
	if (!pseudo)
		return;

	/* We don't do coverage analysis of volatiles.. */
	if (sym->ctype.modifiers & MOD_VOLATILE)
		return;

	/* ..and symbols with external visibility need more care */
	mod = sym->ctype.modifiers & (MOD_NONLOCAL | MOD_STATIC | MOD_ADDRESSABLE);
	if (mod)
		goto external_visibility;

	/* Classify every use of the symbol pseudo */
	def = NULL;
	stores = 0;
	complex = 0;
	FOR_EACH_PTR(pseudo->users, pp) {
		/* We know that the symbol-pseudo use is the "src" in the instruction */
		struct instruction *insn = container(pp, struct instruction, src);

		switch (insn->opcode) {
		case OP_STORE:
			stores++;
			def = insn;
			break;
		case OP_LOAD:
			break;
		case OP_SETVAL:
			/* Taking the address makes the symbol effectively addressable */
			if (!insn->bb)
				continue;
			mod |= MOD_ADDRESSABLE;
			goto external_visibility;
		case OP_NOP:
		case OP_SNOP:
		case OP_LNOP:
		case OP_PHI:
			continue;
		default:
			warning(sym->pos, "symbol '%s' pseudo used in unexpected way", show_ident(sym->ident));
		}
		/* Any access at a non-zero offset forces the complex path */
		complex |= insn->offset;
	} END_FOR_EACH_PTR(pp);

	if (complex)
		goto complex_def;
	if (stores > 1)
		goto multi_def;

	/*
	 * Goodie, we have a single store (if even that) in the whole
	 * thing. Replace all loads with moves from the pseudo,
	 * replace the store with a def.
	 */
	src = VOID;
	if (def)
		src = def->target;

	FOR_EACH_PTR(pseudo->users, pp) {
		struct instruction *insn = container(pp, struct instruction, src);
		if (insn->opcode == OP_LOAD)
			convert_load_instruction(insn, src);
	} END_FOR_EACH_PTR(pp);

	/* Turn the store into a no-op */
	kill_store(def);
	return;

multi_def:
complex_def:
external_visibility:
	/* General case: rewrite each load via dominating stores */
	all = 1;
	FOR_EACH_PTR_REVERSE(pseudo->users, pp) {
		struct instruction *insn = container(pp, struct instruction, src);
		if (insn->opcode == OP_LOAD)
			all &= find_dominating_stores(pseudo, insn, ++bb_generation, !mod);
	} END_FOR_EACH_PTR_REVERSE(pp);

	/* If we converted all the loads, remove the stores. They are dead */
	if (all && !mod) {
		FOR_EACH_PTR(pseudo->users, pp) {
			struct instruction *insn = container(pp, struct instruction, src);
			if (insn->opcode == OP_STORE)
				kill_store(insn);
		} END_FOR_EACH_PTR(pp);
	} else {
		/*
		 * If we couldn't take the shortcut, see if we can at least kill some
		 * of them..
		 */
		FOR_EACH_PTR(pseudo->users, pp) {
			struct instruction *insn = container(pp, struct instruction, src);
			if (insn->opcode == OP_STORE)
				kill_dominated_stores(pseudo, insn, ++bb_generation, insn->bb, !mod, 0);
		} END_FOR_EACH_PTR(pp);

		/* Stores dead at function exit can go too (not for static/global) */
		if (!(mod & (MOD_NONLOCAL | MOD_STATIC))) {
			struct basic_block *bb;
			FOR_EACH_PTR(ep->bbs, bb) {
				if (!bb->children)
					kill_dead_stores(pseudo, ++bb_generation, bb, !mod);
			} END_FOR_EACH_PTR(bb);
		}
	}

	return;
}
/* Run the per-symbol load/store simplifier over every symbol this
 * entrypoint accesses. */
void simplify_symbol_usage(struct entrypoint *ep)
{
	struct symbol *sym;

	FOR_EACH_PTR(ep->accesses, sym) {
		simplify_one_symbol(ep, sym);
	} END_FOR_EACH_PTR(sym);
}
/* Depth-first walk marking every block reachable from 'bb' with the
 * given generation stamp. */
static void mark_bb_reachable(struct basic_block *bb, unsigned long generation)
{
	struct basic_block *child;

	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR(bb->children, child) {
		mark_bb_reachable(child, generation);
	} END_FOR_EACH_PTR(child);
}
608 static void kill_defs(struct instruction *insn)
610 pseudo_t target = insn->target;
612 if (!has_use_list(target))
613 return;
614 if (target->def != insn)
615 return;
617 convert_instruction_target(insn, VOID);
/* Remove a basic block entirely: kill its instructions and detach it
 * from both its children and its parents. */
void kill_bb(struct basic_block *bb)
{
	struct instruction *insn;
	struct basic_block *child, *parent;

	FOR_EACH_PTR(bb->insns, insn) {
		kill_instruction(insn);
		kill_defs(insn);
		/*
		 * We kill unreachable instructions even if they
		 * otherwise aren't "killable". Eg volatile loads
		 * etc.
		 */
		insn->bb = NULL;
	} END_FOR_EACH_PTR(insn);
	bb->insns = NULL;

	FOR_EACH_PTR(bb->children, child) {
		remove_bb_from_list(&child->parents, bb, 0);
	} END_FOR_EACH_PTR(child);
	bb->children = NULL;

	FOR_EACH_PTR(bb->parents, parent) {
		remove_bb_from_list(&parent->children, bb, 0);
	} END_FOR_EACH_PTR(parent);
	bb->parents = NULL;
}
648 static void kill_unreachable_bbs(struct entrypoint *ep)
650 struct basic_block *bb;
651 unsigned long generation = ++bb_generation;
653 mark_bb_reachable(ep->entry, generation);
654 FOR_EACH_PTR(ep->bbs, bb) {
655 if (bb->generation == generation)
656 continue;
657 /* Mark it as being dead */
658 kill_bb(bb);
659 } END_FOR_EACH_PTR(bb);
/*
 * Redirect the terminator of 'bb' so that edges to 'old' point at
 * 'new' instead.  Returns 1 when the terminator was handled (OP_BR or
 * OP_SWITCH), 0 otherwise.
 */
static int rewrite_parent_branch(struct basic_block *bb, struct basic_block *old, struct basic_block *new)
{
	struct instruction *insn = last_instruction(bb->insns);

	if (!insn)
		return 0;

	/* Infinite loops: let's not "optimize" them.. */
	if (old == new)
		return 0;

	switch (insn->opcode) {
	case OP_BR:
		rewrite_branch(bb, &insn->bb_true, old, new);
		rewrite_branch(bb, &insn->bb_false, old, new);

		/* Conditional branch to same target? Degrade to unconditional */
		if (insn->bb_true == insn->bb_false) {
			remove_bb_from_list(&new->parents, bb, 1);
			remove_bb_from_list(&bb->children, new, 1);
			insn->bb_false = NULL;
			kill_use(&insn->cond);
		}
		return 1;
	case OP_SWITCH: {
		struct multijmp *jmp;
		FOR_EACH_PTR(insn->multijmp_list, jmp) {
			rewrite_branch(bb, &jmp->target, old, new);
		} END_FOR_EACH_PTR(jmp);
		return 1;
	}
	default:
		return 0;
	}
}
698 static struct basic_block * rewrite_branch_bb(struct basic_block *bb, struct instruction *br)
700 struct basic_block *parent;
701 struct basic_block *target = br->bb_true;
702 struct basic_block *false = br->bb_false;
704 if (target && false) {
705 pseudo_t cond = br->cond;
706 if (cond->type != PSEUDO_VAL)
707 return NULL;
708 target = cond->value ? target : false;
712 * We can't do FOR_EACH_PTR() here, because the parent list
713 * may change when we rewrite the parent.
715 while ((parent = first_basic_block(bb->parents)) != NULL) {
716 if (!rewrite_parent_branch(parent, bb, target))
717 return NULL;
719 return target;
/*
 * The switch value 'val' is a known constant: find the case range (or
 * the default case) it selects and replace the switch with a direct
 * branch to that target.
 */
static void simplify_one_switch(struct basic_block *bb,
	long long val,
	struct instruction *insn)
{
	struct multijmp *jmp;

	FOR_EACH_PTR(insn->multijmp_list, jmp) {
		/* Default case is encoded as an empty (inverted) range */
		if (jmp->begin > jmp->end)
			goto found;
		if (val >= jmp->begin && val <= jmp->end)
			goto found;
	} END_FOR_EACH_PTR(jmp);
	warning(bb->pos, "Impossible case statement");
	return;

found:
	insert_branch(bb, insn, jmp->target);
}
/* Find switches whose controlling value is a compile-time constant and
 * collapse each into a direct branch. */
static void simplify_switch(struct entrypoint *ep)
{
	struct basic_block *bb;

	FOR_EACH_PTR(ep->bbs, bb) {
		pseudo_t pseudo;
		struct instruction *insn = last_instruction(bb->insns);

		if (!insn || insn->opcode != OP_SWITCH)
			continue;
		pseudo = insn->target;
		if (pseudo->type == PSEUDO_VAL)
			simplify_one_switch(bb, pseudo->value, insn);
	} END_FOR_EACH_PTR(bb);
}
/* Assert that 'bb' (if non-NULL) appears in 'list'.  The named local
 * exists only so the assertion failure prints a readable message. */
static void vrfy_bb_in_list(struct basic_block *bb, struct basic_block_list *list)
{
	if (bb) {
		struct basic_block *tmp;
		int no_bb_in_list = 0;

		FOR_EACH_PTR(list, tmp) {
			if (bb == tmp)
				return;
		} END_FOR_EACH_PTR(tmp);
		assert(no_bb_in_list);
	}
}
/* Verify that every parent of 'bb' lists 'bb' among its children. */
static void vrfy_parents(struct basic_block *bb)
{
	struct basic_block *tmp;
	FOR_EACH_PTR(bb->parents, tmp) {
		vrfy_bb_in_list(bb, tmp->children);
	} END_FOR_EACH_PTR(tmp);
}
/* Verify that the block's terminator targets all appear in its child
 * list, and that every child lists 'bb' as a parent. */
static void vrfy_children(struct basic_block *bb)
{
	struct basic_block *tmp;
	struct instruction *br = last_instruction(bb->insns);

	/* No terminator means no outgoing edges at all */
	if (!br) {
		assert(!bb->children);
		return;
	}
	switch (br->opcode) {
		/* Declared here so it is in scope for the multijmp cases */
		struct multijmp *jmp;
	case OP_BR:
		vrfy_bb_in_list(br->bb_true, bb->children);
		vrfy_bb_in_list(br->bb_false, bb->children);
		break;
	case OP_SWITCH:
	case OP_COMPUTEDGOTO:
		FOR_EACH_PTR(br->multijmp_list, jmp) {
			vrfy_bb_in_list(jmp->target, bb->children);
		} END_FOR_EACH_PTR(jmp);
		break;
	default:
		break;
	}

	FOR_EACH_PTR(bb->children, tmp) {
		vrfy_bb_in_list(bb, tmp->parents);
	} END_FOR_EACH_PTR(tmp);
}
/* Check both directions of one block's CFG edge bookkeeping. */
static void vrfy_bb_flow(struct basic_block *bb)
{
	vrfy_children(bb);
	vrfy_parents(bb);
}
/* Verify the whole flowgraph: every block's edges are consistent and
 * the entry block is actually present in ep->bbs. */
void vrfy_flow(struct entrypoint *ep)
{
	struct basic_block *bb;
	struct basic_block *entry = ep->entry;

	FOR_EACH_PTR(ep->bbs, bb) {
		/* Clearing 'entry' records that we saw it in the list */
		if (bb == entry)
			entry = NULL;
		vrfy_bb_flow(bb);
	} END_FOR_EACH_PTR(bb);
	assert(!entry);
}
/* Top-level flowgraph simplification: fold constant phi-conditional
 * branches, collapse constant switches, then drop whatever became
 * unreachable.  The order matters - the first two passes create the
 * dead blocks the third one removes. */
void simplify_flow(struct entrypoint *ep)
{
	simplify_phi_nodes(ep);
	simplify_switch(ep);
	kill_unreachable_bbs(ep);
}
/*
 * Pack the flowgraph: bypass blocks that contain nothing but a branch,
 * and merge each block into its parent when it is the parent's only
 * child and has that parent as its only predecessor.
 */
void pack_basic_blocks(struct entrypoint *ep)
{
	struct basic_block *bb;

	/* See if we can merge a bb into another one.. */
	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *first, *insn;
		struct basic_block *parent, *child, *last;

		if (!bb_reachable(bb))
			continue;

		/*
		 * Just a branch?
		 */
		FOR_EACH_PTR(bb->insns, first) {
			if (!first->bb)
				continue;
			switch (first->opcode) {
			case OP_NOP: case OP_LNOP: case OP_SNOP:
				continue;
			case OP_BR: {
				/* Branch-only block: try to bypass it entirely */
				struct basic_block *replace;
				replace = rewrite_branch_bb(bb, first);
				if (replace) {
					if (bb == ep->entry)
						ep->entry = replace;
					kill_bb(bb);
					goto no_merge;
				}
			}
				/* fallthrough */
			default:
				goto out;
			}
		} END_FOR_EACH_PTR(first);

out:
		/* Never merge the entry block away */
		if (ep->entry == bb)
			continue;

		/*
		 * See if we only have one parent..
		 */
		last = NULL;
		FOR_EACH_PTR(bb->parents, parent) {
			if (last) {
				if (last != parent)
					goto no_merge;
				continue;
			}
			last = parent;
		} END_FOR_EACH_PTR(parent);

		parent = last;
		if (!parent || parent == bb)
			continue;

		/*
		 * Goodie. See if the parent can merge..
		 */
		FOR_EACH_PTR(parent->children, child) {
			if (child != bb)
				goto no_merge;
		} END_FOR_EACH_PTR(child);

		/*
		 * Merge the two.
		 */
		repeat_phase |= REPEAT_CSE;

		/*
		 * But don't allow phi-source merges after this.
		 * FIXME, FIXME! I really need to think about this.
		 * Is it true? I think it's ok to merge phi-sources,
		 * as long as we keep their relative position in
		 * the stream. It's the re-ordering we can't have.
		 * I think.
		 */
		merge_phi_sources = 0;

		/* The parent adopts bb's children.. */
		parent->children = bb->children;
		bb->children = NULL;
		bb->parents = NULL;

		FOR_EACH_PTR(parent->children, child) {
			replace_bb_in_list(&child->parents, bb, parent, 0);
		} END_FOR_EACH_PTR(child);

		/* ..drops its old terminator, and absorbs bb's instructions */
		delete_last_instruction(&parent->insns);
		FOR_EACH_PTR(bb->insns, insn) {
			if (insn->bb) {
				assert(insn->bb == bb);
				insn->bb = parent;
			}
			add_instruction(&parent->insns, insn);
		} END_FOR_EACH_PTR(insn);
		bb->insns = NULL;

	no_merge:
		/* nothing to do */;
	} END_FOR_EACH_PTR(bb);
}