/*
 * flow.c (from smatch.git) — walk and simplify the linearized flowgraph.
 * [web-viewer header removed: "testsuite: cleanup result files" / blob id]
 */
1 /*
2 * Flow - walk the linearized flowgraph, simplifying it as we
3 * go along.
5 * Copyright (C) 2004 Linus Torvalds
6 */
8 #include <string.h>
9 #include <stdarg.h>
10 #include <stdlib.h>
11 #include <stdio.h>
12 #include <stddef.h>
13 #include <assert.h>
15 #include "parse.h"
16 #include "expression.h"
17 #include "linearize.h"
18 #include "flow.h"
19 #include "target.h"
/*
 * Monotonically increasing stamp used to mark basic blocks as visited
 * during graph walks; callers bump it (++bb_generation) per traversal.
 */
unsigned long bb_generation;
/*
 * Dammit, if we have a phi-node followed by a conditional
 * branch on that phi-node, we should damn well be able to
 * do something about the source. Maybe.
 */
/*
 * Redirect one branch edge of 'bb' from 'old' to 'new', keeping the
 * parent/child lists of all three blocks consistent.
 * Returns 1 if the edge was rewritten, 0 if *ptr did not point at 'old'
 * (or the rewrite would be a no-op).
 */
static int rewrite_branch(struct basic_block *bb,
	struct basic_block **ptr,
	struct basic_block *old,
	struct basic_block *new)
{
	if (*ptr != old || new == old)
		return 0;

	/* We might find new if-conversions or non-dominating CSEs */
	repeat_phase |= REPEAT_CSE;
	*ptr = new;
	replace_bb_in_list(&bb->children, old, new, 1);
	remove_bb_from_list(&old->parents, bb, 1);
	add_bb(&new->parents, bb);
	return 1;
}
46 * Return the known truth value of a pseudo, or -1 if
47 * it's not known.
49 static int pseudo_truth_value(pseudo_t pseudo)
51 switch (pseudo->type) {
52 case PSEUDO_VAL:
53 return !!pseudo->value;
55 case PSEUDO_REG: {
56 struct instruction *insn = pseudo->def;
58 /* A symbol address is always considered true.. */
59 if (insn->opcode == OP_SYMADDR && insn->target == pseudo)
60 return 1;
62 /* Fall through */
63 default:
64 return -1;
69 * Does a basic block depend on the pseudos that "src" defines?
71 static int bb_depends_on(struct basic_block *target, struct basic_block *src)
73 pseudo_t pseudo;
75 FOR_EACH_PTR(src->defines, pseudo) {
76 if (pseudo_in_list(target->needs, pseudo))
77 return 1;
78 } END_FOR_EACH_PTR(pseudo);
79 return 0;
/*
 * Return 1 if 'pseudo' is needed in some parent of 'bb'.
 * Need liveness info.
 */
/*
 * Depth-first walk of 'curr's children, stamped with 'generation'
 * to avoid revisiting blocks. The walk stops at the phi's own bb
 * and at blocks that (re)define the target pseudo.
 */
static int needed_phisrc(struct instruction *phi, struct basic_block *curr, unsigned long generation)
{
	pseudo_t target = phi->target;
	struct basic_block *bb;

	curr->generation = generation;
	FOR_EACH_PTR(curr->children, bb) {
		if (bb->generation == generation)
			continue;
		if (bb == phi->bb)
			continue;
		/* A redefinition kills the value; nothing below needs this source */
		if (pseudo_in_list(bb->defines, target)) {
			continue;
		}
		if (pseudo_in_list(bb->needs, target))
			return 1;
		if (needed_phisrc(phi, bb, generation))
			return 1;
	} END_FOR_EACH_PTR(bb);

	return 0;
}
111 * When we reach here, we have:
112 * - a basic block that ends in a conditional branch and
113 * that has no side effects apart from the pseudos it
114 * may change.
115 * - the phi-node that the conditional branch depends on
116 * - full pseudo liveness information
118 * We need to check if any of the _sources_ of the phi-node
119 * may be constant, and not actually need this block at all.
121 static int try_to_simplify_bb(struct basic_block *bb, struct instruction *first, struct instruction *second)
123 int changed = 0;
124 pseudo_t phi;
126 FOR_EACH_PTR(first->phi_list, phi) {
127 struct instruction *def = phi->def;
128 struct basic_block *source, *target;
129 pseudo_t pseudo;
130 struct instruction *br;
131 int true;
133 if (!def)
134 continue;
135 source = def->bb;
136 pseudo = def->src1;
137 if (!pseudo || !source)
138 continue;
139 br = last_instruction(source->insns);
140 if (!br)
141 continue;
142 if (br->opcode != OP_CBR && br->opcode != OP_BR)
143 continue;
144 true = pseudo_truth_value(pseudo);
145 if (true < 0)
146 continue;
147 target = true ? second->bb_true : second->bb_false;
148 if (bb_depends_on(target, bb))
149 continue;
150 changed |= rewrite_branch(source, &br->bb_true, bb, target);
151 changed |= rewrite_branch(source, &br->bb_false, bb, target);
152 if (changed && !needed_phisrc(first, source, ++bb_generation))
153 kill_use(THIS_ADDRESS(phi));
154 } END_FOR_EACH_PTR(phi);
155 return changed;
158 static int bb_has_side_effects(struct basic_block *bb)
160 struct instruction *insn;
161 FOR_EACH_PTR(bb->insns, insn) {
162 switch (insn->opcode) {
163 case OP_CALL:
164 /* FIXME! This should take "const" etc into account */
165 return 1;
167 case OP_STORE:
168 case OP_CONTEXT:
169 return 1;
171 case OP_ASM:
172 /* FIXME! This should take "volatile" etc into account */
173 return 1;
175 default:
176 continue;
178 } END_FOR_EACH_PTR(insn);
179 return 0;
182 static int simplify_phi_branch(struct basic_block *bb, struct instruction *br)
184 pseudo_t cond = br->cond;
185 struct instruction *def;
187 if (cond->type != PSEUDO_REG)
188 return 0;
189 def = cond->def;
190 if (def->bb != bb || def->opcode != OP_PHI)
191 return 0;
192 if (bb_has_side_effects(bb))
193 return 0;
194 return try_to_simplify_bb(bb, def, br);
/*
 * Simplify a branch-to-branch: if the edge *target_p leads to a block
 * whose final OP_CBR tests the same condition as 'br', the second test
 * is redundant on this path and we can jump to its known destination.
 *
 * NOTE(review): the parameter name 'true' is a keyword in C23 (and a
 * macro under <stdbool.h>); kept unchanged here since it is part of
 * the visible signature — consider renaming project-wide.
 */
static int simplify_branch_branch(struct basic_block *bb, struct instruction *br,
	struct basic_block **target_p, int true)
{
	struct basic_block *target = *target_p, *final;
	struct instruction *insn;
	int retval;

	if (target == bb)
		return 0;
	insn = last_instruction(target->insns);
	if (!insn || insn->opcode != OP_CBR || insn->cond != br->cond)
		return 0;
	/*
	 * Ahhah! We've found a branch to a branch on the same conditional!
	 * Now we just need to see if we can rewrite the branch..
	 */
	retval = 0;
	/* Which way the second branch must go, given we arrived on this edge */
	final = true ? insn->bb_true : insn->bb_false;
	if (bb_has_side_effects(target))
		goto try_to_rewrite_target;
	if (bb_depends_on(final, target))
		goto try_to_rewrite_target;
	return rewrite_branch(bb, target_p, target, final);

try_to_rewrite_target:
	/*
	 * If we're the only parent, at least we can rewrite the
	 * now-known second branch.
	 */
	if (bb_list_size(target->parents) != 1)
		return retval;
	insert_branch(target, insn, final);
	kill_instruction(insn);
	return 1;
}
233 static int simplify_one_branch(struct basic_block *bb, struct instruction *br)
235 if (simplify_phi_branch(bb, br))
236 return 1;
237 return simplify_branch_branch(bb, br, &br->bb_true, 1) |
238 simplify_branch_branch(bb, br, &br->bb_false, 0);
241 static int simplify_branch_nodes(struct entrypoint *ep)
243 int changed = 0;
244 struct basic_block *bb;
246 FOR_EACH_PTR(ep->bbs, bb) {
247 struct instruction *br = last_instruction(bb->insns);
249 if (!br || br->opcode != OP_CBR)
250 continue;
251 changed |= simplify_one_branch(bb, br);
252 } END_FOR_EACH_PTR(bb);
253 return changed;
/*
 * This is called late - when we have intra-bb liveness information..
 */
/* Entry point: returns nonzero if the flowgraph was changed. */
int simplify_flow(struct entrypoint *ep)
{
	return simplify_branch_nodes(ep);
}
/* Splice all users from 'src' onto the end of '*dst' (ptr-list concat). */
static inline void concat_user_list(struct pseudo_user_list *src, struct pseudo_user_list **dst)
{
	concat_ptr_list((struct ptr_list *)src, (struct ptr_list **)dst);
}
/*
 * Replace every use of insn->target with 'src': rewrite each user's
 * pseudo slot, then move the whole user list over to 'src'.
 */
void convert_instruction_target(struct instruction *insn, pseudo_t src)
{
	pseudo_t target;
	struct pseudo_user *pu;
	/*
	 * Go through the "insn->users" list and replace them all..
	 */
	target = insn->target;
	if (target == src)
		return;
	FOR_EACH_PTR(target->users, pu) {
		if (*pu->userp != VOID) {
			/* Every live user slot must still point at the old target */
			assert(*pu->userp == target);
			*pu->userp = src;
		}
	} END_FOR_EACH_PTR(pu);
	/* VOID and value pseudos don't track users; only splice when they do */
	if (has_use_list(src))
		concat_user_list(target->users, &src->users);
	target->users = NULL;
}
290 void convert_load_instruction(struct instruction *insn, pseudo_t src)
292 convert_instruction_target(insn, src);
293 /* Turn the load into a no-op */
294 insn->opcode = OP_LNOP;
295 insn->bb = NULL;
298 static int overlapping_memop(struct instruction *a, struct instruction *b)
300 unsigned int a_start = bytes_to_bits(a->offset);
301 unsigned int b_start = bytes_to_bits(b->offset);
302 unsigned int a_size = a->size;
303 unsigned int b_size = b->size;
305 if (a_size + a_start <= b_start)
306 return 0;
307 if (b_size + b_start <= a_start)
308 return 0;
309 return 1;
312 static inline int same_memop(struct instruction *a, struct instruction *b)
314 return a->offset == b->offset && a->size == b->size;
/*
 * Return 1 if "dom" dominates the access to "pseudo"
 * in "insn".
 *
 * Return 0 if it doesn't, and -1 if you don't know.
 *
 * 'local' means the symbol has no external visibility, so calls
 * and unrelated pointers can be assumed not to touch it.
 */
int dominates(pseudo_t pseudo, struct instruction *insn, struct instruction *dom, int local)
{
	int opcode = dom->opcode;

	/* A call (or function entry) may clobber anything non-local */
	if (opcode == OP_CALL || opcode == OP_ENTRY)
		return local ? 0 : -1;
	if (opcode != OP_LOAD && opcode != OP_STORE)
		return 0;
	if (dom->src != pseudo) {
		if (local)
			return 0;
		/* We don't think two explicitly different symbols ever alias */
		if (dom->src->type == PSEUDO_SYM)
			return 0;
		/* We could try to do some alias analysis here */
		return -1;
	}
	if (!same_memop(insn, dom)) {
		/* A differently-shaped load never dominates */
		if (dom->opcode == OP_LOAD)
			return 0;
		if (!overlapping_memop(insn, dom))
			return 0;
		/* Partial overlap with a store: undecidable here */
		return -1;
	}
	return 1;
}
350 static int phisrc_in_bb(struct pseudo_list *list, struct basic_block *bb)
352 pseudo_t p;
353 FOR_EACH_PTR(list, p) {
354 if (p->def->bb == bb)
355 return 1;
356 } END_FOR_EACH_PTR(p);
358 return 0;
/*
 * Walk up through the parents of 'bb' looking, in each parent, for the
 * latest instruction that dominates the load 'insn'. Each dominator found
 * becomes a phi-source appended to '*dominators'. Returns 0 as soon as an
 * undecidable (aliasing) access is hit, 1 otherwise.
 */
static int find_dominating_parents(pseudo_t pseudo, struct instruction *insn,
	struct basic_block *bb, unsigned long generation, struct pseudo_list **dominators,
	int local)
{
	struct basic_block *parent;

	/* Reached the entry: a local with no store dominating it is still OK */
	if (!bb->parents)
		return !!local;

	FOR_EACH_PTR(bb->parents, parent) {
		struct instruction *one;
		struct instruction *br;
		pseudo_t phi;

		/* Scan the parent backwards for the nearest dominator */
		FOR_EACH_PTR_REVERSE(parent->insns, one) {
			int dominance;
			if (one == insn)
				goto no_dominance;
			dominance = dominates(pseudo, insn, one, local);
			if (dominance < 0) {
				/* Ignore partial load dominators, give up on stores */
				if (one->opcode == OP_LOAD)
					continue;
				return 0;
			}
			if (!dominance)
				continue;
			goto found_dominator;
		} END_FOR_EACH_PTR_REVERSE(one);
no_dominance:
		/* Nothing in this parent; recurse further up (once per generation) */
		if (parent->generation == generation)
			continue;
		parent->generation = generation;

		if (!find_dominating_parents(pseudo, insn, parent, generation, dominators, local))
			return 0;
		continue;

found_dominator:
		/* Only one phi-source per parent block */
		if (dominators && phisrc_in_bb(*dominators, parent))
			continue;
		/* Insert the phi-source just before the parent's branch */
		br = delete_last_instruction(&parent->insns);
		phi = alloc_phi(parent, one->target, one->size);
		phi->ident = phi->ident ? : pseudo->ident;
		add_instruction(&parent->insns, br);
		use_pseudo(insn, phi, add_pseudo(dominators, phi));
	} END_FOR_EACH_PTR(parent);
	return 1;
}
/*
 * We should probably sort the phi list just to make it easier to compare
 * later for equality.
 */
/*
 * Turn a load whose dominating stores are known into either a direct
 * use of the single stored pseudo, or a phi-node over all dominators.
 */
void rewrite_load_instruction(struct instruction *insn, struct pseudo_list *dominators)
{
	pseudo_t new, phi;

	/*
	 * Check for somewhat common case of duplicate
	 * phi nodes.
	 */
	new = first_pseudo(dominators)->def->src1;
	FOR_EACH_PTR(dominators, phi) {
		if (new != phi->def->src1)
			goto complex_phi;
		new->ident = new->ident ? : phi->ident;
	} END_FOR_EACH_PTR(phi);

	/*
	 * All the same pseudo - mark the phi-nodes unused
	 * and convert the load into a LNOP and replace the
	 * pseudo.
	 */
	FOR_EACH_PTR(dominators, phi) {
		kill_instruction(phi->def);
	} END_FOR_EACH_PTR(phi);
	convert_load_instruction(insn, new);
	return;

complex_phi:
	/* We leave symbol pseudos with a bogus usage list here */
	if (insn->src->type != PSEUDO_SYM)
		kill_use(&insn->src);
	/* The load becomes a phi-node over the dominating stores */
	insn->opcode = OP_PHI;
	insn->phi_list = dominators;
}
/*
 * Find the store(s) that dominate the load 'insn' and rewrite the load
 * accordingly. Returns 1 when the load was converted (or was dead),
 * 0 when the dominators could not be determined.
 */
static int find_dominating_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, int local)
{
	struct basic_block *bb = insn->bb;
	struct instruction *one, *dom = NULL;
	struct pseudo_list *dominators;
	int partial;

	/* Unreachable load? Undo it */
	if (!bb) {
		insn->opcode = OP_LNOP;
		return 1;
	}

	/* First, look for a dominator inside the load's own block */
	partial = 0;
	FOR_EACH_PTR(bb->insns, one) {
		int dominance;
		if (one == insn)
			goto found;
		dominance = dominates(pseudo, insn, one, local);
		if (dominance < 0) {
			/* Ignore partial load dominators */
			if (one->opcode == OP_LOAD)
				continue;
			dom = NULL;
			partial = 1;
			continue;
		}
		if (!dominance)
			continue;
		dom = one;
		partial = 0;
	} END_FOR_EACH_PTR(one);
	/* Whaa? */
	warning(pseudo->sym->pos, "unable to find symbol read");
	return 0;
found:
	/* A partially-overlapping store blocks the optimization */
	if (partial)
		return 0;

	if (dom) {
		convert_load_instruction(insn, dom->target);
		return 1;
	}

	/* OK, go find the parents */
	bb->generation = generation;

	dominators = NULL;
	if (!find_dominating_parents(pseudo, insn, bb, generation, &dominators, local))
		return 0;

	/* This happens with initial assignments to structures etc.. */
	if (!dominators) {
		if (!local)
			return 0;
		/* Reading an uninitialized local: warn if out of range, yields 0 */
		check_access(insn);
		convert_load_instruction(insn, value_pseudo(0));
		return 1;
	}

	/*
	 * If we find just one dominating instruction, we
	 * can turn it into a direct thing. Otherwise we'll
	 * have to turn the load into a phi-node of the
	 * dominators.
	 */
	rewrite_load_instruction(insn, dominators);
	return 1;
}
519 static void kill_store(struct instruction *insn)
521 if (insn) {
522 insn->bb = NULL;
523 insn->opcode = OP_SNOP;
524 kill_use(&insn->target);
/* Kill a pseudo that is dead on exit from the bb */
/*
 * Scan 'bb' backwards: any store to 'pseudo' not observed before the end
 * of the function is dead and removed. Recurses into parents whose only
 * successor is this block. 'generation' guards against revisits.
 */
static void kill_dead_stores(pseudo_t pseudo, unsigned long generation, struct basic_block *bb, int local)
{
	struct instruction *insn;
	struct basic_block *parent;

	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR_REVERSE(bb->insns, insn) {
		int opcode = insn->opcode;

		if (opcode != OP_LOAD && opcode != OP_STORE) {
			if (local)
				continue;
			/* A call might observe a non-local pseudo: stop */
			if (opcode == OP_CALL)
				return;
			continue;
		}
		if (insn->src == pseudo) {
			/* A load revives the value: everything above is live */
			if (opcode == OP_LOAD)
				return;
			kill_store(insn);
			continue;
		}
		if (local)
			continue;
		/* Unknown pointer access could alias a non-local pseudo: stop */
		if (insn->src->type != PSEUDO_SYM)
			return;
	} END_FOR_EACH_PTR_REVERSE(insn);

	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		/* Only recurse when we are the parent's sole child */
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dead_stores(pseudo, generation, parent, local);
	} END_FOR_EACH_PTR(parent);
}
/*
 * This should see if the "insn" trivially dominates some previous store, and kill the
 * store if unnecessary.
 */
static void kill_dominated_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, struct basic_block *bb, int local, int found)
{
	struct instruction *one;
	struct basic_block *parent;

	/* Unreachable store? Undo it */
	if (!bb) {
		kill_store(insn);
		return;
	}
	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR_REVERSE(bb->insns, one) {
		int dominance;
		/* First locate 'insn' itself, then start judging what precedes it */
		if (!found) {
			if (one != insn)
				continue;
			found = 1;
			continue;
		}
		dominance = dominates(pseudo, insn, one, local);
		if (!dominance)
			continue;
		/* Undecidable aliasing: stop, earlier stores may be live */
		if (dominance < 0)
			return;
		/* An intervening load makes the earlier store observable */
		if (one->opcode == OP_LOAD)
			return;
		kill_store(one);
	} END_FOR_EACH_PTR_REVERSE(one);

	if (!found) {
		warning(bb->pos, "Unable to find instruction");
		return;
	}

	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		/* Only recurse into parents whose sole successor is this block */
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dominated_stores(pseudo, insn, generation, parent, local, found);
	} END_FOR_EACH_PTR(parent);
}
/*
 * Warn when a live load/store on a symbol pseudo falls outside the
 * symbol's declared size (offset below 0 or end past bit_size).
 */
void check_access(struct instruction *insn)
{
	pseudo_t pseudo = insn->src;

	if (insn->bb && pseudo->type == PSEUDO_SYM) {
		/* 'bit' is the bit offset just past the end of this access */
		int offset = insn->offset, bit = bytes_to_bits(offset) + insn->size;
		struct symbol *sym = pseudo->sym;

		if (sym->bit_size > 0 && (offset < 0 || bit > sym->bit_size))
			warning(insn->pos, "invalid access %s '%s' (%d %d)",
				offset < 0 ? "below" : "past the end of",
				show_ident(sym->ident), offset,
				bits_to_bytes(sym->bit_size));
	}
}
/*
 * Promote accesses of one symbol to SSA pseudos where safe:
 * the simple case (single store, no aliasing) rewrites every load
 * directly; otherwise fall back to dominating-store analysis and
 * dead-store elimination.
 */
static void simplify_one_symbol(struct entrypoint *ep, struct symbol *sym)
{
	pseudo_t pseudo, src;
	struct pseudo_user *pu;
	struct instruction *def;
	unsigned long mod;
	int all, stores, complex;

	/* Never used as a symbol? */
	pseudo = sym->pseudo;
	if (!pseudo)
		return;

	/* We don't do coverage analysis of volatiles.. */
	if (sym->ctype.modifiers & MOD_VOLATILE)
		return;

	/* ..and symbols with external visibility need more care */
	mod = sym->ctype.modifiers & (MOD_NONLOCAL | MOD_STATIC | MOD_ADDRESSABLE);
	if (mod)
		goto external_visibility;

	def = NULL;
	stores = 0;
	complex = 0;
	FOR_EACH_PTR(pseudo->users, pu) {
		/* We know that the symbol-pseudo use is the "src" in the instruction */
		struct instruction *insn = pu->insn;

		switch (insn->opcode) {
		case OP_STORE:
			stores++;
			def = insn;
			break;
		case OP_LOAD:
			break;
		case OP_SYMADDR:
			if (!insn->bb)
				continue;
			/* Taking the address makes the symbol aliasable */
			mod |= MOD_ADDRESSABLE;
			goto external_visibility;
		case OP_NOP:
		case OP_SNOP:
		case OP_LNOP:
		case OP_PHI:
			continue;
		default:
			warning(sym->pos, "symbol '%s' pseudo used in unexpected way", show_ident(sym->ident));
		}
		/* Any nonzero offset means a partial (struct/array) access */
		complex |= insn->offset;
	} END_FOR_EACH_PTR(pu);

	if (complex)
		goto complex_def;
	if (stores > 1)
		goto multi_def;

	/*
	 * Goodie, we have a single store (if even that) in the whole
	 * thing. Replace all loads with moves from the pseudo,
	 * replace the store with a def.
	 */
	src = VOID;
	if (def)
		src = def->target;

	FOR_EACH_PTR(pseudo->users, pu) {
		struct instruction *insn = pu->insn;
		if (insn->opcode == OP_LOAD) {
			check_access(insn);
			convert_load_instruction(insn, src);
		}
	} END_FOR_EACH_PTR(pu);

	/* Turn the store into a no-op */
	kill_store(def);
	return;

multi_def:
complex_def:
external_visibility:
	/* General path: resolve each load via its dominating stores */
	all = 1;
	FOR_EACH_PTR_REVERSE(pseudo->users, pu) {
		struct instruction *insn = pu->insn;
		if (insn->opcode == OP_LOAD)
			all &= find_dominating_stores(pseudo, insn, ++bb_generation, !mod);
	} END_FOR_EACH_PTR_REVERSE(pu);

	/* If we converted all the loads, remove the stores. They are dead */
	if (all && !mod) {
		FOR_EACH_PTR(pseudo->users, pu) {
			struct instruction *insn = pu->insn;
			if (insn->opcode == OP_STORE)
				kill_store(insn);
		} END_FOR_EACH_PTR(pu);
	} else {
		/*
		 * If we couldn't take the shortcut, see if we can at least kill some
		 * of them..
		 */
		FOR_EACH_PTR(pseudo->users, pu) {
			struct instruction *insn = pu->insn;
			if (insn->opcode == OP_STORE)
				kill_dominated_stores(pseudo, insn, ++bb_generation, insn->bb, !mod, 0);
		} END_FOR_EACH_PTR(pu);

		/* Stores dead at function exit (from exit blocks upward) */
		if (!(mod & (MOD_NONLOCAL | MOD_STATIC))) {
			struct basic_block *bb;
			FOR_EACH_PTR(ep->bbs, bb) {
				if (!bb->children)
					kill_dead_stores(pseudo, ++bb_generation, bb, !mod);
			} END_FOR_EACH_PTR(bb);
		}
	}

	return;
}
754 void simplify_symbol_usage(struct entrypoint *ep)
756 pseudo_t pseudo;
758 FOR_EACH_PTR(ep->accesses, pseudo) {
759 simplify_one_symbol(ep, pseudo->sym);
760 } END_FOR_EACH_PTR(pseudo);
763 static void mark_bb_reachable(struct basic_block *bb, unsigned long generation)
765 struct basic_block *child;
767 if (bb->generation == generation)
768 return;
769 bb->generation = generation;
770 FOR_EACH_PTR(bb->children, child) {
771 mark_bb_reachable(child, generation);
772 } END_FOR_EACH_PTR(child);
775 static void kill_defs(struct instruction *insn)
777 pseudo_t target = insn->target;
779 if (!has_use_list(target))
780 return;
781 if (target->def != insn)
782 return;
784 convert_instruction_target(insn, VOID);
/*
 * Delete a basic block entirely: force-kill every instruction,
 * void its definitions, and unlink it from parents and children.
 */
void kill_bb(struct basic_block *bb)
{
	struct instruction *insn;
	struct basic_block *child, *parent;

	FOR_EACH_PTR(bb->insns, insn) {
		kill_instruction_force(insn);
		kill_defs(insn);
		/*
		 * We kill unreachable instructions even if they
		 * otherwise aren't "killable" (e.g. volatile loads)
		 */
	} END_FOR_EACH_PTR(insn);
	bb->insns = NULL;

	FOR_EACH_PTR(bb->children, child) {
		remove_bb_from_list(&child->parents, bb, 0);
	} END_FOR_EACH_PTR(child);
	bb->children = NULL;

	FOR_EACH_PTR(bb->parents, parent) {
		remove_bb_from_list(&parent->children, bb, 0);
	} END_FOR_EACH_PTR(parent);
	bb->parents = NULL;
}
/*
 * Mark everything reachable from the entry block, then kill and
 * unlink every block the walk did not reach, compacting ep->bbs.
 */
void kill_unreachable_bbs(struct entrypoint *ep)
{
	struct basic_block *bb;
	unsigned long generation = ++bb_generation;

	mark_bb_reachable(ep->entry->bb, generation);
	FOR_EACH_PTR(ep->bbs, bb) {
		if (bb->generation == generation)
			continue;
		/* Mark it as being dead */
		kill_bb(bb);
		bb->ep = NULL;
		/* Safe in-iteration removal provided by the ptrlist macros */
		DELETE_CURRENT_PTR(bb);
	} END_FOR_EACH_PTR(bb);
	PACK_PTR_LIST(&ep->bbs);
}
/*
 * Redirect every edge in 'bb' (a parent block) that targets 'old'
 * to point at 'new' instead, for all terminator kinds.
 * Returns nonzero if any edge was rewritten.
 */
static int rewrite_parent_branch(struct basic_block *bb, struct basic_block *old, struct basic_block *new)
{
	int changed = 0;
	struct instruction *insn = last_instruction(bb->insns);

	if (!insn)
		return 0;

	/* Infinite loops: let's not "optimize" them.. */
	if (old == new)
		return 0;

	switch (insn->opcode) {
	case OP_CBR:
		changed |= rewrite_branch(bb, &insn->bb_false, old, new);
		/* fall through */
	case OP_BR:
		changed |= rewrite_branch(bb, &insn->bb_true, old, new);
		/* 'bb' is a parent of 'old', so one edge must have matched */
		assert(changed);
		return changed;
	case OP_SWITCH: {
		struct multijmp *jmp;
		FOR_EACH_PTR(insn->multijmp_list, jmp) {
			changed |= rewrite_branch(bb, &jmp->target, old, new);
		} END_FOR_EACH_PTR(jmp);
		assert(changed);
		return changed;
	}
	default:
		return 0;
	}
}
863 static struct basic_block * rewrite_branch_bb(struct basic_block *bb, struct instruction *br)
865 struct basic_block *parent;
866 struct basic_block *target = br->bb_true;
867 struct basic_block *false = br->bb_false;
869 if (br->opcode == OP_CBR) {
870 pseudo_t cond = br->cond;
871 if (cond->type != PSEUDO_VAL)
872 return NULL;
873 target = cond->value ? target : false;
877 * We can't do FOR_EACH_PTR() here, because the parent list
878 * may change when we rewrite the parent.
880 while ((parent = first_basic_block(bb->parents)) != NULL) {
881 if (!rewrite_parent_branch(parent, bb, target))
882 return NULL;
884 return target;
/*
 * Assert that 'bb' (if non-NULL) appears in 'list'. The named
 * always-zero variable makes the assertion message self-describing.
 */
static void vrfy_bb_in_list(struct basic_block *bb, struct basic_block_list *list)
{
	if (bb) {
		struct basic_block *tmp;
		int no_bb_in_list = 0;

		FOR_EACH_PTR(list, tmp) {
			if (bb == tmp)
				return;
		} END_FOR_EACH_PTR(tmp);
		assert(no_bb_in_list);
	}
}
901 static void vrfy_parents(struct basic_block *bb)
903 struct basic_block *tmp;
904 FOR_EACH_PTR(bb->parents, tmp) {
905 vrfy_bb_in_list(bb, tmp->children);
906 } END_FOR_EACH_PTR(tmp);
/*
 * Verify the child list of 'bb' agrees with its terminator: every
 * branch destination is a listed child, and every listed child
 * lists 'bb' back as a parent.
 */
static void vrfy_children(struct basic_block *bb)
{
	struct basic_block *tmp;
	struct instruction *br = last_instruction(bb->insns);

	if (!br) {
		assert(!bb->children);
		return;
	}
	switch (br->opcode) {
		/* declaration at switch scope, shared by the multijmp cases */
		struct multijmp *jmp;
	case OP_CBR:
		vrfy_bb_in_list(br->bb_false, bb->children);
		/* fall through */
	case OP_BR:
		vrfy_bb_in_list(br->bb_true, bb->children);
		break;
	case OP_SWITCH:
	case OP_COMPUTEDGOTO:
		FOR_EACH_PTR(br->multijmp_list, jmp) {
			vrfy_bb_in_list(jmp->target, bb->children);
		} END_FOR_EACH_PTR(jmp);
		break;
	default:
		break;
	}

	FOR_EACH_PTR(bb->children, tmp) {
		vrfy_bb_in_list(bb, tmp->parents);
	} END_FOR_EACH_PTR(tmp);
}
/* Verify both edge directions (children then parents) for one block. */
static void vrfy_bb_flow(struct basic_block *bb)
{
	vrfy_children(bb);
	vrfy_parents(bb);
}
947 void vrfy_flow(struct entrypoint *ep)
949 struct basic_block *bb;
950 struct basic_block *entry = ep->entry->bb;
952 FOR_EACH_PTR(ep->bbs, bb) {
953 if (bb == entry)
954 entry = NULL;
955 vrfy_bb_flow(bb);
956 } END_FOR_EACH_PTR(bb);
957 assert(!entry);
/*
 * Compact the flowgraph: bypass blocks that contain only a branch,
 * and merge each block into its sole parent when that parent has
 * no other child.
 */
void pack_basic_blocks(struct entrypoint *ep)
{
	struct basic_block *bb;

	/* See if we can merge a bb into another one.. */
	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *first, *insn;
		struct basic_block *parent, *child, *last;

		if (!bb_reachable(bb))
			continue;

		/*
		 * Just a branch?
		 */
		FOR_EACH_PTR(bb->insns, first) {
			if (!first->bb)
				continue;
			switch (first->opcode) {
			case OP_NOP: case OP_LNOP: case OP_SNOP:
				continue;
			case OP_CBR:
			case OP_BR: {
				struct basic_block *replace;
				replace = rewrite_branch_bb(bb, first);
				if (replace) {
					kill_bb(bb);
					goto no_merge;
				}
			}
				/* fallthrough */
			default:
				goto out;
			}
		} END_FOR_EACH_PTR(first);

out:
		/*
		 * See if we only have one parent..
		 */
		last = NULL;
		FOR_EACH_PTR(bb->parents, parent) {
			if (last) {
				if (last != parent)
					goto no_merge;
				continue;
			}
			last = parent;
		} END_FOR_EACH_PTR(parent);

		parent = last;
		if (!parent || parent == bb)
			continue;

		/*
		 * Goodie. See if the parent can merge..
		 */
		FOR_EACH_PTR(parent->children, child) {
			if (child != bb)
				goto no_merge;
		} END_FOR_EACH_PTR(child);

		/*
		 * Merge the two.
		 */
		repeat_phase |= REPEAT_CSE;

		/* The parent adopts bb's out-edges; bb is fully unlinked */
		parent->children = bb->children;
		bb->children = NULL;
		bb->parents = NULL;

		FOR_EACH_PTR(parent->children, child) {
			replace_bb_in_list(&child->parents, bb, parent, 0);
		} END_FOR_EACH_PTR(child);

		/* Drop the parent's terminator, then append bb's instructions */
		kill_instruction(delete_last_instruction(&parent->insns));
		FOR_EACH_PTR(bb->insns, insn) {
			if (insn->bb) {
				assert(insn->bb == bb);
				insn->bb = parent;
			}
			add_instruction(&parent->insns, insn);
		} END_FOR_EACH_PTR(insn);
		bb->insns = NULL;

	no_merge:
		/* nothing to do */;
	} END_FOR_EACH_PTR(bb);
}