/*
 * Flow - walk the linearized flowgraph, simplifying it as we
 * go along.
 *
 * Copyright (C) 2004 Linus Torvalds
 */
16 #include "expression.h"
17 #include "linearize.h"
/*
 * Global generation counter for graph walks.  Each walk bumps the
 * counter and tags visited blocks (bb->generation) with the new value,
 * so previously-set marks never need to be cleared.
 */
unsigned long bb_generation;
24 * Dammit, if we have a phi-node followed by a conditional
25 * branch on that phi-node, we should damn well be able to
26 * do something about the source. Maybe.
28 static int rewrite_branch(struct basic_block
*bb
,
29 struct basic_block
**ptr
,
30 struct basic_block
*old
,
31 struct basic_block
*new)
33 if (*ptr
!= old
|| new == old
)
36 /* We might find new if-conversions or non-dominating CSEs */
37 repeat_phase
|= REPEAT_CSE
;
39 replace_bb_in_list(&bb
->children
, old
, new, 1);
40 remove_bb_from_list(&old
->parents
, bb
, 1);
41 add_bb(&new->parents
, bb
);
46 * Return the known truth value of a pseudo, or -1 if
49 static int pseudo_truth_value(pseudo_t pseudo
)
51 switch (pseudo
->type
) {
53 return !!pseudo
->value
;
56 struct instruction
*insn
= pseudo
->def
;
58 /* A symbol address is always considered true.. */
59 if (insn
->opcode
== OP_SYMADDR
&& insn
->target
== pseudo
)
69 * Does a basic block depend on the pseudos that "src" defines?
71 static int bb_depends_on(struct basic_block
*target
, struct basic_block
*src
)
75 FOR_EACH_PTR(src
->defines
, pseudo
) {
76 if (pseudo_in_list(target
->needs
, pseudo
))
78 } END_FOR_EACH_PTR(pseudo
);
83 * Return 1 if 'pseudo' is needed in some parent of 'bb'.
/*
 * NOTE(review): this block is a garbled extraction — the original file's
 * line numbers are fused into the text and several interior lines
 * (braces, continue/return statements) are missing entirely.  From the
 * visible tokens: a generation-tagged depth-first walk over the children
 * of 'curr' that reports whether the phi target is still live below.
 * The exact statements elided around the 'defines'/'needs' checks could
 * not be recovered here — confirm against upstream sparse flow.c before
 * compiling.
 */
86 static int needed_phisrc(struct instruction
*phi
, struct basic_block
*curr
, unsigned long generation
)
88 pseudo_t target
= phi
->target
;
89 struct basic_block
*bb
;
/* mark the current block as visited for this walk */
91 curr
->generation
= generation
;
92 FOR_EACH_PTR(curr
->children
, bb
) {
/* already visited during this walk — skip (elided statement follows) */
93 if (bb
->generation
== generation
)
/* child re-defines the pseudo — presumably uses below take that
   definition instead of the phi source; body elided, TODO confirm */
97 if (pseudo_in_list(bb
->defines
, target
)) {
/* pseudo is live into this child: the phi source is still needed */
100 if (pseudo_in_list(bb
->needs
, target
))
/* otherwise recurse into the child's subtree */
102 if (needed_phisrc(phi
, bb
, generation
))
105 } END_FOR_EACH_PTR(bb
);
111 * When we reach here, we have:
112 * - a basic block that ends in a conditional branch and
113 * that has no side effects apart from the pseudos it
115 * - the phi-node that the conditional branch depends on
116 * - full pseudo liveness information
118 * We need to check if any of the _sources_ of the phi-node
119 * may be constant, and not actually need this block at all.
121 static int try_to_simplify_bb(struct basic_block
*bb
, struct instruction
*first
, struct instruction
*second
)
126 FOR_EACH_PTR(first
->phi_list
, phi
) {
127 struct instruction
*def
= phi
->def
;
128 struct basic_block
*source
, *target
;
130 struct instruction
*br
;
137 if (!pseudo
|| !source
)
139 br
= last_instruction(source
->insns
);
142 if (br
->opcode
!= OP_CBR
&& br
->opcode
!= OP_BR
)
144 true = pseudo_truth_value(pseudo
);
147 target
= true ? second
->bb_true
: second
->bb_false
;
148 if (bb_depends_on(target
, bb
))
150 changed
|= rewrite_branch(source
, &br
->bb_true
, bb
, target
);
151 changed
|= rewrite_branch(source
, &br
->bb_false
, bb
, target
);
152 if (changed
&& !needed_phisrc(first
, source
, ++bb_generation
))
153 kill_use(THIS_ADDRESS(phi
));
154 } END_FOR_EACH_PTR(phi
);
158 static int bb_has_side_effects(struct basic_block
*bb
)
160 struct instruction
*insn
;
161 FOR_EACH_PTR(bb
->insns
, insn
) {
162 switch (insn
->opcode
) {
164 /* FIXME! This should take "const" etc into account */
172 /* FIXME! This should take "volatile" etc into account */
178 } END_FOR_EACH_PTR(insn
);
182 static int simplify_phi_branch(struct basic_block
*bb
, struct instruction
*br
)
184 pseudo_t cond
= br
->cond
;
185 struct instruction
*def
;
187 if (cond
->type
!= PSEUDO_REG
)
190 if (def
->bb
!= bb
|| def
->opcode
!= OP_PHI
)
192 if (bb_has_side_effects(bb
))
194 return try_to_simplify_bb(bb
, def
, br
);
197 static int simplify_branch_branch(struct basic_block
*bb
, struct instruction
*br
,
198 struct basic_block
**target_p
, int true)
200 struct basic_block
*target
= *target_p
, *final
;
201 struct instruction
*insn
;
206 insn
= last_instruction(target
->insns
);
207 if (!insn
|| insn
->opcode
!= OP_CBR
|| insn
->cond
!= br
->cond
)
210 * Ahhah! We've found a branch to a branch on the same conditional!
211 * Now we just need to see if we can rewrite the branch..
214 final
= true ? insn
->bb_true
: insn
->bb_false
;
215 if (bb_has_side_effects(target
))
216 goto try_to_rewrite_target
;
217 if (bb_depends_on(final
, target
))
218 goto try_to_rewrite_target
;
219 return rewrite_branch(bb
, target_p
, target
, final
);
221 try_to_rewrite_target
:
223 * If we're the only parent, at least we can rewrite the
224 * now-known second branch.
226 if (bb_list_size(target
->parents
) != 1)
228 insert_branch(target
, insn
, final
);
229 kill_instruction(insn
);
233 static int simplify_one_branch(struct basic_block
*bb
, struct instruction
*br
)
235 if (simplify_phi_branch(bb
, br
))
237 return simplify_branch_branch(bb
, br
, &br
->bb_true
, 1) |
238 simplify_branch_branch(bb
, br
, &br
->bb_false
, 0);
241 static int simplify_branch_nodes(struct entrypoint
*ep
)
244 struct basic_block
*bb
;
246 FOR_EACH_PTR(ep
->bbs
, bb
) {
247 struct instruction
*br
= last_instruction(bb
->insns
);
249 if (!br
|| br
->opcode
!= OP_CBR
)
251 changed
|= simplify_one_branch(bb
, br
);
252 } END_FOR_EACH_PTR(bb
);
/*
 * This is called late - when we have intra-bb liveness information..
 */
int simplify_flow(struct entrypoint *ep)
{
	return simplify_branch_nodes(ep);
}
/* Append the pseudo-user list 'src' onto '*dst' (thin ptr_list wrapper). */
static inline void concat_user_list(struct pseudo_user_list *src, struct pseudo_user_list **dst)
{
	concat_ptr_list((struct ptr_list *)src, (struct ptr_list **)dst);
}
269 void convert_instruction_target(struct instruction
*insn
, pseudo_t src
)
272 struct pseudo_user
*pu
;
274 * Go through the "insn->users" list and replace them all..
276 target
= insn
->target
;
279 FOR_EACH_PTR(target
->users
, pu
) {
280 if (*pu
->userp
!= VOID
) {
281 assert(*pu
->userp
== target
);
284 } END_FOR_EACH_PTR(pu
);
285 if (has_use_list(src
))
286 concat_user_list(target
->users
, &src
->users
);
287 target
->users
= NULL
;
290 void convert_load_instruction(struct instruction
*insn
, pseudo_t src
)
292 convert_instruction_target(insn
, src
);
293 /* Turn the load into a no-op */
294 insn
->opcode
= OP_LNOP
;
298 static int overlapping_memop(struct instruction
*a
, struct instruction
*b
)
300 unsigned int a_start
= bytes_to_bits(a
->offset
);
301 unsigned int b_start
= bytes_to_bits(b
->offset
);
302 unsigned int a_size
= a
->size
;
303 unsigned int b_size
= b
->size
;
305 if (a_size
+ a_start
<= b_start
)
307 if (b_size
+ b_start
<= a_start
)
312 static inline int same_memop(struct instruction
*a
, struct instruction
*b
)
314 return a
->offset
== b
->offset
&& a
->size
== b
->size
;
318 * Return 1 if "dom" dominates the access to "pseudo"
321 * Return 0 if it doesn't, and -1 if you don't know.
323 int dominates(pseudo_t pseudo
, struct instruction
*insn
, struct instruction
*dom
, int local
)
325 int opcode
= dom
->opcode
;
327 if (opcode
== OP_CALL
|| opcode
== OP_ENTRY
)
328 return local
? 0 : -1;
329 if (opcode
!= OP_LOAD
&& opcode
!= OP_STORE
)
331 if (dom
->src
!= pseudo
) {
334 /* We don't think two explicitly different symbols ever alias */
335 if (dom
->src
->type
== PSEUDO_SYM
)
337 /* We could try to do some alias analysis here */
340 if (!same_memop(insn
, dom
)) {
341 if (dom
->opcode
== OP_LOAD
)
343 if (!overlapping_memop(insn
, dom
))
350 static int phisrc_in_bb(struct pseudo_list
*list
, struct basic_block
*bb
)
353 FOR_EACH_PTR(list
, p
) {
354 if (p
->def
->bb
== bb
)
356 } END_FOR_EACH_PTR(p
);
361 static int find_dominating_parents(pseudo_t pseudo
, struct instruction
*insn
,
362 struct basic_block
*bb
, unsigned long generation
, struct pseudo_list
**dominators
,
365 struct basic_block
*parent
;
370 FOR_EACH_PTR(bb
->parents
, parent
) {
371 struct instruction
*one
;
372 struct instruction
*br
;
375 FOR_EACH_PTR_REVERSE(parent
->insns
, one
) {
379 dominance
= dominates(pseudo
, insn
, one
, local
);
381 if (one
->opcode
== OP_LOAD
)
387 goto found_dominator
;
388 } END_FOR_EACH_PTR_REVERSE(one
);
390 if (parent
->generation
== generation
)
392 parent
->generation
= generation
;
394 if (!find_dominating_parents(pseudo
, insn
, parent
, generation
, dominators
, local
))
399 if (dominators
&& phisrc_in_bb(*dominators
, parent
))
401 br
= delete_last_instruction(&parent
->insns
);
402 phi
= alloc_phi(parent
, one
->target
, one
->size
);
403 phi
->ident
= phi
->ident
? : pseudo
->ident
;
404 add_instruction(&parent
->insns
, br
);
405 use_pseudo(insn
, phi
, add_pseudo(dominators
, phi
));
406 } END_FOR_EACH_PTR(parent
);
411 * We should probably sort the phi list just to make it easier to compare
412 * later for equality.
414 void rewrite_load_instruction(struct instruction
*insn
, struct pseudo_list
*dominators
)
419 * Check for somewhat common case of duplicate
422 new = first_pseudo(dominators
)->def
->src1
;
423 FOR_EACH_PTR(dominators
, phi
) {
424 if (new != phi
->def
->src1
)
426 new->ident
= new->ident
? : phi
->ident
;
427 } END_FOR_EACH_PTR(phi
);
430 * All the same pseudo - mark the phi-nodes unused
431 * and convert the load into a LNOP and replace the
434 FOR_EACH_PTR(dominators
, phi
) {
435 kill_instruction(phi
->def
);
436 } END_FOR_EACH_PTR(phi
);
437 convert_load_instruction(insn
, new);
441 /* We leave symbol pseudos with a bogus usage list here */
442 if (insn
->src
->type
!= PSEUDO_SYM
)
443 kill_use(&insn
->src
);
444 insn
->opcode
= OP_PHI
;
445 insn
->phi_list
= dominators
;
448 static int find_dominating_stores(pseudo_t pseudo
, struct instruction
*insn
,
449 unsigned long generation
, int local
)
451 struct basic_block
*bb
= insn
->bb
;
452 struct instruction
*one
, *dom
= NULL
;
453 struct pseudo_list
*dominators
;
456 /* Unreachable load? Undo it */
458 insn
->opcode
= OP_LNOP
;
463 FOR_EACH_PTR(bb
->insns
, one
) {
467 dominance
= dominates(pseudo
, insn
, one
, local
);
469 /* Ignore partial load dominators */
470 if (one
->opcode
== OP_LOAD
)
480 } END_FOR_EACH_PTR(one
);
482 warning(pseudo
->sym
->pos
, "unable to find symbol read");
489 convert_load_instruction(insn
, dom
->target
);
493 /* OK, go find the parents */
494 bb
->generation
= generation
;
497 if (!find_dominating_parents(pseudo
, insn
, bb
, generation
, &dominators
, local
))
500 /* This happens with initial assignments to structures etc.. */
505 convert_load_instruction(insn
, value_pseudo(0));
510 * If we find just one dominating instruction, we
511 * can turn it into a direct thing. Otherwise we'll
512 * have to turn the load into a phi-node of the
515 rewrite_load_instruction(insn
, dominators
);
519 static void kill_store(struct instruction
*insn
)
523 insn
->opcode
= OP_SNOP
;
524 kill_use(&insn
->target
);
528 /* Kill a pseudo that is dead on exit from the bb */
529 static void kill_dead_stores(pseudo_t pseudo
, unsigned long generation
, struct basic_block
*bb
, int local
)
531 struct instruction
*insn
;
532 struct basic_block
*parent
;
534 if (bb
->generation
== generation
)
536 bb
->generation
= generation
;
537 FOR_EACH_PTR_REVERSE(bb
->insns
, insn
) {
538 int opcode
= insn
->opcode
;
540 if (opcode
!= OP_LOAD
&& opcode
!= OP_STORE
) {
543 if (opcode
== OP_CALL
)
547 if (insn
->src
== pseudo
) {
548 if (opcode
== OP_LOAD
)
555 if (insn
->src
->type
!= PSEUDO_SYM
)
557 } END_FOR_EACH_PTR_REVERSE(insn
);
559 FOR_EACH_PTR(bb
->parents
, parent
) {
560 struct basic_block
*child
;
561 FOR_EACH_PTR(parent
->children
, child
) {
562 if (child
&& child
!= bb
)
564 } END_FOR_EACH_PTR(child
);
565 kill_dead_stores(pseudo
, generation
, parent
, local
);
566 } END_FOR_EACH_PTR(parent
);
570 * This should see if the "insn" trivially dominates some previous store, and kill the
571 * store if unnecessary.
573 static void kill_dominated_stores(pseudo_t pseudo
, struct instruction
*insn
,
574 unsigned long generation
, struct basic_block
*bb
, int local
, int found
)
576 struct instruction
*one
;
577 struct basic_block
*parent
;
579 /* Unreachable store? Undo it */
584 if (bb
->generation
== generation
)
586 bb
->generation
= generation
;
587 FOR_EACH_PTR_REVERSE(bb
->insns
, one
) {
595 dominance
= dominates(pseudo
, insn
, one
, local
);
600 if (one
->opcode
== OP_LOAD
)
603 } END_FOR_EACH_PTR_REVERSE(one
);
606 warning(bb
->pos
, "Unable to find instruction");
610 FOR_EACH_PTR(bb
->parents
, parent
) {
611 struct basic_block
*child
;
612 FOR_EACH_PTR(parent
->children
, child
) {
613 if (child
&& child
!= bb
)
615 } END_FOR_EACH_PTR(child
);
616 kill_dominated_stores(pseudo
, insn
, generation
, parent
, local
, found
);
617 } END_FOR_EACH_PTR(parent
);
620 void check_access(struct instruction
*insn
)
622 pseudo_t pseudo
= insn
->src
;
624 if (insn
->bb
&& pseudo
->type
== PSEUDO_SYM
) {
625 int offset
= insn
->offset
, bit
= bytes_to_bits(offset
) + insn
->size
;
626 struct symbol
*sym
= pseudo
->sym
;
628 if (sym
->bit_size
> 0 && (offset
< 0 || bit
> sym
->bit_size
))
629 warning(insn
->pos
, "invalid access %s '%s' (%d %d)",
630 offset
< 0 ? "below" : "past the end of",
631 show_ident(sym
->ident
), offset
,
632 bits_to_bytes(sym
->bit_size
));
636 static void simplify_one_symbol(struct entrypoint
*ep
, struct symbol
*sym
)
638 pseudo_t pseudo
, src
;
639 struct pseudo_user
*pu
;
640 struct instruction
*def
;
642 int all
, stores
, complex;
644 /* Never used as a symbol? */
645 pseudo
= sym
->pseudo
;
649 /* We don't do coverage analysis of volatiles.. */
650 if (sym
->ctype
.modifiers
& MOD_VOLATILE
)
653 /* ..and symbols with external visibility need more care */
654 mod
= sym
->ctype
.modifiers
& (MOD_NONLOCAL
| MOD_STATIC
| MOD_ADDRESSABLE
);
656 goto external_visibility
;
661 FOR_EACH_PTR(pseudo
->users
, pu
) {
662 /* We know that the symbol-pseudo use is the "src" in the instruction */
663 struct instruction
*insn
= pu
->insn
;
665 switch (insn
->opcode
) {
675 mod
|= MOD_ADDRESSABLE
;
676 goto external_visibility
;
683 warning(sym
->pos
, "symbol '%s' pseudo used in unexpected way", show_ident(sym
->ident
));
685 complex |= insn
->offset
;
686 } END_FOR_EACH_PTR(pu
);
694 * Goodie, we have a single store (if even that) in the whole
695 * thing. Replace all loads with moves from the pseudo,
696 * replace the store with a def.
702 FOR_EACH_PTR(pseudo
->users
, pu
) {
703 struct instruction
*insn
= pu
->insn
;
704 if (insn
->opcode
== OP_LOAD
) {
706 convert_load_instruction(insn
, src
);
708 } END_FOR_EACH_PTR(pu
);
710 /* Turn the store into a no-op */
718 FOR_EACH_PTR_REVERSE(pseudo
->users
, pu
) {
719 struct instruction
*insn
= pu
->insn
;
720 if (insn
->opcode
== OP_LOAD
)
721 all
&= find_dominating_stores(pseudo
, insn
, ++bb_generation
, !mod
);
722 } END_FOR_EACH_PTR_REVERSE(pu
);
724 /* If we converted all the loads, remove the stores. They are dead */
726 FOR_EACH_PTR(pseudo
->users
, pu
) {
727 struct instruction
*insn
= pu
->insn
;
728 if (insn
->opcode
== OP_STORE
)
730 } END_FOR_EACH_PTR(pu
);
733 * If we couldn't take the shortcut, see if we can at least kill some
736 FOR_EACH_PTR(pseudo
->users
, pu
) {
737 struct instruction
*insn
= pu
->insn
;
738 if (insn
->opcode
== OP_STORE
)
739 kill_dominated_stores(pseudo
, insn
, ++bb_generation
, insn
->bb
, !mod
, 0);
740 } END_FOR_EACH_PTR(pu
);
742 if (!(mod
& (MOD_NONLOCAL
| MOD_STATIC
))) {
743 struct basic_block
*bb
;
744 FOR_EACH_PTR(ep
->bbs
, bb
) {
746 kill_dead_stores(pseudo
, ++bb_generation
, bb
, !mod
);
747 } END_FOR_EACH_PTR(bb
);
754 void simplify_symbol_usage(struct entrypoint
*ep
)
758 FOR_EACH_PTR(ep
->accesses
, pseudo
) {
759 simplify_one_symbol(ep
, pseudo
->sym
);
760 } END_FOR_EACH_PTR(pseudo
);
763 static void mark_bb_reachable(struct basic_block
*bb
, unsigned long generation
)
765 struct basic_block
*child
;
767 if (bb
->generation
== generation
)
769 bb
->generation
= generation
;
770 FOR_EACH_PTR(bb
->children
, child
) {
771 mark_bb_reachable(child
, generation
);
772 } END_FOR_EACH_PTR(child
);
775 static void kill_defs(struct instruction
*insn
)
777 pseudo_t target
= insn
->target
;
779 if (!has_use_list(target
))
781 if (target
->def
!= insn
)
784 convert_instruction_target(insn
, VOID
);
787 void kill_bb(struct basic_block
*bb
)
789 struct instruction
*insn
;
790 struct basic_block
*child
, *parent
;
792 FOR_EACH_PTR(bb
->insns
, insn
) {
793 kill_instruction_force(insn
);
796 * We kill unreachable instructions even if they
797 * otherwise aren't "killable" (e.g. volatile loads)
799 } END_FOR_EACH_PTR(insn
);
802 FOR_EACH_PTR(bb
->children
, child
) {
803 remove_bb_from_list(&child
->parents
, bb
, 0);
804 } END_FOR_EACH_PTR(child
);
807 FOR_EACH_PTR(bb
->parents
, parent
) {
808 remove_bb_from_list(&parent
->children
, bb
, 0);
809 } END_FOR_EACH_PTR(parent
);
813 void kill_unreachable_bbs(struct entrypoint
*ep
)
815 struct basic_block
*bb
;
816 unsigned long generation
= ++bb_generation
;
818 mark_bb_reachable(ep
->entry
->bb
, generation
);
819 FOR_EACH_PTR(ep
->bbs
, bb
) {
820 if (bb
->generation
== generation
)
822 /* Mark it as being dead */
825 DELETE_CURRENT_PTR(bb
);
826 } END_FOR_EACH_PTR(bb
);
827 PACK_PTR_LIST(&ep
->bbs
);
830 static int rewrite_parent_branch(struct basic_block
*bb
, struct basic_block
*old
, struct basic_block
*new)
833 struct instruction
*insn
= last_instruction(bb
->insns
);
838 /* Infinite loops: let's not "optimize" them.. */
842 switch (insn
->opcode
) {
844 changed
|= rewrite_branch(bb
, &insn
->bb_false
, old
, new);
847 changed
|= rewrite_branch(bb
, &insn
->bb_true
, old
, new);
851 struct multijmp
*jmp
;
852 FOR_EACH_PTR(insn
->multijmp_list
, jmp
) {
853 changed
|= rewrite_branch(bb
, &jmp
->target
, old
, new);
854 } END_FOR_EACH_PTR(jmp
);
863 static struct basic_block
* rewrite_branch_bb(struct basic_block
*bb
, struct instruction
*br
)
865 struct basic_block
*parent
;
866 struct basic_block
*target
= br
->bb_true
;
867 struct basic_block
*false = br
->bb_false
;
869 if (br
->opcode
== OP_CBR
) {
870 pseudo_t cond
= br
->cond
;
871 if (cond
->type
!= PSEUDO_VAL
)
873 target
= cond
->value
? target
: false;
877 * We can't do FOR_EACH_PTR() here, because the parent list
878 * may change when we rewrite the parent.
880 while ((parent
= first_basic_block(bb
->parents
)) != NULL
) {
881 if (!rewrite_parent_branch(parent
, bb
, target
))
887 static void vrfy_bb_in_list(struct basic_block
*bb
, struct basic_block_list
*list
)
890 struct basic_block
*tmp
;
891 int no_bb_in_list
= 0;
893 FOR_EACH_PTR(list
, tmp
) {
896 } END_FOR_EACH_PTR(tmp
);
897 assert(no_bb_in_list
);
901 static void vrfy_parents(struct basic_block
*bb
)
903 struct basic_block
*tmp
;
904 FOR_EACH_PTR(bb
->parents
, tmp
) {
905 vrfy_bb_in_list(bb
, tmp
->children
);
906 } END_FOR_EACH_PTR(tmp
);
909 static void vrfy_children(struct basic_block
*bb
)
911 struct basic_block
*tmp
;
912 struct instruction
*br
= last_instruction(bb
->insns
);
915 assert(!bb
->children
);
918 switch (br
->opcode
) {
919 struct multijmp
*jmp
;
921 vrfy_bb_in_list(br
->bb_false
, bb
->children
);
924 vrfy_bb_in_list(br
->bb_true
, bb
->children
);
927 case OP_COMPUTEDGOTO
:
928 FOR_EACH_PTR(br
->multijmp_list
, jmp
) {
929 vrfy_bb_in_list(jmp
->target
, bb
->children
);
930 } END_FOR_EACH_PTR(jmp
);
936 FOR_EACH_PTR(bb
->children
, tmp
) {
937 vrfy_bb_in_list(bb
, tmp
->parents
);
938 } END_FOR_EACH_PTR(tmp
);
/* Verify both directions of 'bb's flowgraph linkage. */
static void vrfy_bb_flow(struct basic_block *bb)
{
	vrfy_children(bb);
	vrfy_parents(bb);
}
947 void vrfy_flow(struct entrypoint
*ep
)
949 struct basic_block
*bb
;
950 struct basic_block
*entry
= ep
->entry
->bb
;
952 FOR_EACH_PTR(ep
->bbs
, bb
) {
956 } END_FOR_EACH_PTR(bb
);
960 void pack_basic_blocks(struct entrypoint
*ep
)
962 struct basic_block
*bb
;
964 /* See if we can merge a bb into another one.. */
965 FOR_EACH_PTR(ep
->bbs
, bb
) {
966 struct instruction
*first
, *insn
;
967 struct basic_block
*parent
, *child
, *last
;
969 if (!bb_reachable(bb
))
975 FOR_EACH_PTR(bb
->insns
, first
) {
978 switch (first
->opcode
) {
979 case OP_NOP
: case OP_LNOP
: case OP_SNOP
:
983 struct basic_block
*replace
;
984 replace
= rewrite_branch_bb(bb
, first
);
994 } END_FOR_EACH_PTR(first
);
998 * See if we only have one parent..
1001 FOR_EACH_PTR(bb
->parents
, parent
) {
1008 } END_FOR_EACH_PTR(parent
);
1011 if (!parent
|| parent
== bb
)
1015 * Goodie. See if the parent can merge..
1017 FOR_EACH_PTR(parent
->children
, child
) {
1020 } END_FOR_EACH_PTR(child
);
1025 repeat_phase
|= REPEAT_CSE
;
1027 parent
->children
= bb
->children
;
1028 bb
->children
= NULL
;
1031 FOR_EACH_PTR(parent
->children
, child
) {
1032 replace_bb_in_list(&child
->parents
, bb
, parent
, 0);
1033 } END_FOR_EACH_PTR(child
);
1035 kill_instruction(delete_last_instruction(&parent
->insns
));
1036 FOR_EACH_PTR(bb
->insns
, insn
) {
1038 assert(insn
->bb
== bb
);
1041 add_instruction(&parent
->insns
, insn
);
1042 } END_FOR_EACH_PTR(insn
);
1046 /* nothing to do */;
1047 } END_FOR_EACH_PTR(bb
);