/*
 * Flow - walk the linearized flowgraph, simplifying it as we
 * go along.
 *
 * Copyright (C) 2004 Linus Torvalds
 */

#include <string.h>
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <stddef.h>
#include <assert.h>

#include "parse.h"
#include "expression.h"
#include "linearize.h"
#include "flow.h"
#include "target.h"

unsigned long bb_generation;

/*
 * Dammit, if we have a phi-node followed by a conditional
 * branch on that phi-node, we should damn well be able to
 * do something about the source. Maybe.
 */
static int rewrite_branch(struct basic_block *bb,
	struct basic_block **ptr,
	struct basic_block *old,
	struct basic_block *new)
{
	if (*ptr != old || new == old)
		return 0;

	/* We might find new if-conversions or non-dominating CSEs */
	repeat_phase |= REPEAT_CSE;
	*ptr = new;
	replace_bb_in_list(&bb->children, old, new, 1);
	remove_bb_from_list(&old->parents, bb, 1);
	add_bb(&new->parents, bb);
	return 1;
}

/*
 * Return the known truth value of a pseudo, or -1 if
 * it's not known.
 */
static int pseudo_truth_value(pseudo_t pseudo)
{
	switch (pseudo->type) {
	case PSEUDO_VAL:
		return !!pseudo->value;

	case PSEUDO_REG: {
		struct instruction *insn = pseudo->def;

		/* A symbol address is always considered true.. */
		if (insn->opcode == OP_SYMADDR && insn->target == pseudo)
			return 1;
	}
		/* Fall through */
	default:
		return -1;
	}
}

/*
 * Does a basic block depend on the pseudos that "src" defines?
 */
static int bb_depends_on(struct basic_block *target, struct basic_block *src)
{
	pseudo_t pseudo;

	FOR_EACH_PTR(src->defines, pseudo) {
		if (pseudo_in_list(target->needs, pseudo))
			return 1;
	} END_FOR_EACH_PTR(pseudo);
	return 0;
}

/*
 * When we reach here, we have:
 *  - a basic block that ends in a conditional branch and
 *    that has no side effects apart from the pseudos it
 *    may change.
 *  - the phi-node that the conditional branch depends on
 *  - full pseudo liveness information
 *
 * We need to check if any of the _sources_ of the phi-node
 * may be constant, and not actually need this block at all.
 */
static int try_to_simplify_bb(struct basic_block *bb, struct instruction *first, struct instruction *second)
{
	int changed = 0;
	pseudo_t phi;

	FOR_EACH_PTR(first->phi_list, phi) {
		struct instruction *def = phi->def;
		struct basic_block *source, *target;
		pseudo_t pseudo;
		struct instruction *br;
		int true;

		if (!def)
			continue;
		source = def->bb;
		pseudo = def->src1;
		if (!pseudo || !source)
			continue;
		br = last_instruction(source->insns);
		if (!br)
			continue;
		if (br->opcode != OP_BR)
			continue;
		true = pseudo_truth_value(pseudo);
		if (true < 0)
			continue;
		target = true ? second->bb_true : second->bb_false;
		if (bb_depends_on(target, bb))
			continue;
		changed |= rewrite_branch(source, &br->bb_true, bb, target);
		changed |= rewrite_branch(source, &br->bb_false, bb, target);
	} END_FOR_EACH_PTR(phi);
	return changed;
}

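/*
 * Illustrative sketch of try_to_simplify_bb() (added commentary, not
 * part of the original source).  Roughly, given
 *
 *	bb2: ... %phi1 <- 1 ...; br bb1
 *	bb1: phi %r <- %phi1, %phi2; br %r, bbT, bbF
 *
 * the phi source reaching bb1 from bb2 is known true, so bb2's branch
 * can be rewritten to jump straight to bbT - provided bbT doesn't need
 * any other pseudo that bb1 defines.
 */
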
static int bb_has_side_effects(struct basic_block *bb)
{
	struct instruction *insn;
	FOR_EACH_PTR(bb->insns, insn) {
		switch (insn->opcode) {
		case OP_CALL:
			/* FIXME! This should take "const" etc into account */
			return 1;

		case OP_STORE:
		case OP_CONTEXT:
			return 1;

		case OP_ASM:
			/* FIXME! This should take "volatile" etc into account */
			return 1;

		default:
			continue;
		}
	} END_FOR_EACH_PTR(insn);
	return 0;
}

static int simplify_phi_branch(struct basic_block *bb, struct instruction *br)
{
	pseudo_t cond = br->cond;
	struct instruction *def;

	if (cond->type != PSEUDO_REG)
		return 0;
	def = cond->def;
	if (def->bb != bb || def->opcode != OP_PHI)
		return 0;
	if (bb_has_side_effects(bb))
		return 0;
	return try_to_simplify_bb(bb, def, br);
}

static int simplify_branch_branch(struct basic_block *bb, struct instruction *br,
	struct basic_block **target_p, int true)
{
	struct basic_block *target = *target_p, *final;
	struct instruction *insn;
	int retval;

	if (target == bb)
		return 0;
	insn = last_instruction(target->insns);
	if (!insn || insn->opcode != OP_BR || insn->cond != br->cond)
		return 0;
	/*
	 * Ahhah! We've found a branch to a branch on the same conditional!
	 * Now we just need to see if we can rewrite the branch..
	 */
	retval = 0;
	final = true ? insn->bb_true : insn->bb_false;
	if (bb_has_side_effects(target))
		goto try_to_rewrite_target;
	if (bb_depends_on(final, target))
		goto try_to_rewrite_target;
	return rewrite_branch(bb, target_p, target, final);

try_to_rewrite_target:
	/*
	 * If we're the only parent, at least we can rewrite the
	 * now-known second branch.
	 */
	if (bb_list_size(target->parents) != 1)
		return retval;
	insert_branch(target, insn, final);
	kill_instruction(insn);
	return 1;
}

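/*
 * Illustrative sketch of the rewrite above (added commentary, not part
 * of the original source).  Branch-to-branch threading, roughly:
 *
 *	bb1: br %c, bb2, bb3
 *	bb2: br %c, bb4, bb5
 *
 * On the bb1->bb2 edge %c is known true, so bb1's true target can be
 * redirected to bb4, skipping the second test of the same condition.
 */
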
static int simplify_one_branch(struct basic_block *bb, struct instruction *br)
{
	if (simplify_phi_branch(bb, br))
		return 1;
	return simplify_branch_branch(bb, br, &br->bb_true, 1) |
	       simplify_branch_branch(bb, br, &br->bb_false, 0);
}

static int simplify_branch_nodes(struct entrypoint *ep)
{
	int changed = 0;
	struct basic_block *bb;

	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *br = last_instruction(bb->insns);

		if (!br || br->opcode != OP_BR || !br->bb_false)
			continue;
		changed |= simplify_one_branch(bb, br);
	} END_FOR_EACH_PTR(bb);
	return changed;
}

/*
 * This is called late - when we have intra-bb liveness information..
 */
int simplify_flow(struct entrypoint *ep)
{
	return simplify_branch_nodes(ep);
}

static inline void concat_user_list(struct pseudo_user_list *src, struct pseudo_user_list **dst)
{
	concat_ptr_list((struct ptr_list *)src, (struct ptr_list **)dst);
}

void convert_instruction_target(struct instruction *insn, pseudo_t src)
{
	pseudo_t target;
	struct pseudo_user *pu;
	/*
	 * Go through the "insn->users" list and replace them all..
	 */
	target = insn->target;
	if (target == src)
		return;
	FOR_EACH_PTR(target->users, pu) {
		if (*pu->userp != VOID) {
			assert(*pu->userp == target);
			*pu->userp = src;
		}
	} END_FOR_EACH_PTR(pu);
	concat_user_list(target->users, &src->users);
	target->users = NULL;
}

void convert_load_instruction(struct instruction *insn, pseudo_t src)
{
	convert_instruction_target(insn, src);
	/* Turn the load into a no-op */
	insn->opcode = OP_LNOP;
	insn->bb = NULL;
}

static int overlapping_memop(struct instruction *a, struct instruction *b)
{
	unsigned int a_start = bytes_to_bits(a->offset);
	unsigned int b_start = bytes_to_bits(b->offset);
	unsigned int a_size = a->size;
	unsigned int b_size = b->size;

	if (a_size + a_start <= b_start)
		return 0;
	if (b_size + b_start <= a_start)
		return 0;
	return 1;
}

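/*
 * Note on units (added commentary, not part of the original source):
 * offsets are stored in bytes while sizes are in bits, so both ranges
 * are converted to bits before comparing.  E.g. a 32-bit access at byte
 * offset 0 covers bits [0,32) and overlaps an 8-bit access at byte
 * offset 3, which covers bits [24,32).
 */
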
static inline int same_memop(struct instruction *a, struct instruction *b)
{
	return a->offset == b->offset && a->size == b->size;
}

/*
 * Return 1 if "dom" dominates the access to "pseudo"
 * in "insn".
 *
 * Return 0 if it doesn't, and -1 if you don't know.
 */
int dominates(pseudo_t pseudo, struct instruction *insn, struct instruction *dom, int local)
{
	int opcode = dom->opcode;

	if (opcode == OP_CALL || opcode == OP_ENTRY)
		return local ? 0 : -1;
	if (opcode != OP_LOAD && opcode != OP_STORE)
		return 0;
	if (dom->src != pseudo) {
		if (local)
			return 0;
		/* We don't think two explicitly different symbols ever alias */
		if (dom->src->type == PSEUDO_SYM)
			return 0;
		/* We could try to do some alias analysis here */
		return -1;
	}
	if (!same_memop(insn, dom)) {
		if (dom->opcode == OP_LOAD)
			return 0;
		if (!overlapping_memop(insn, dom))
			return 0;
		return -1;
	}
	return 1;
}

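/*
 * Note on "local" (added commentary, not part of the original source):
 * judging by the call sites, it is set when the symbol is function-local
 * and never had its address taken, so calls and accesses through other
 * pseudos cannot alias it; without it, such instructions force the
 * conservative -1 ("don't know") answer.
 */
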
static int phisrc_in_bb(struct pseudo_list *list, struct basic_block *bb)
{
	pseudo_t p;
	FOR_EACH_PTR(list, p) {
		if (p->def->bb == bb)
			return 1;
	} END_FOR_EACH_PTR(p);

	return 0;
}

static int find_dominating_parents(pseudo_t pseudo, struct instruction *insn,
	struct basic_block *bb, unsigned long generation, struct pseudo_list **dominators,
	int local)
{
	struct basic_block *parent;

	if (!bb->parents)
		return !!local;

	FOR_EACH_PTR(bb->parents, parent) {
		struct instruction *one;
		struct instruction *br;
		pseudo_t phi;

		FOR_EACH_PTR_REVERSE(parent->insns, one) {
			int dominance;
			if (one == insn)
				goto no_dominance;
			dominance = dominates(pseudo, insn, one, local);
			if (dominance < 0) {
				if (one->opcode == OP_LOAD)
					continue;
				return 0;
			}
			if (!dominance)
				continue;
			goto found_dominator;
		} END_FOR_EACH_PTR_REVERSE(one);
no_dominance:
		if (parent->generation == generation)
			continue;
		parent->generation = generation;

		if (!find_dominating_parents(pseudo, insn, parent, generation, dominators, local))
			return 0;
		continue;

found_dominator:
		if (dominators && phisrc_in_bb(*dominators, parent))
			continue;
		br = delete_last_instruction(&parent->insns);
		phi = alloc_phi(parent, one->target, one->size);
		phi->ident = phi->ident ? : pseudo->ident;
		add_instruction(&parent->insns, br);
		use_pseudo(insn, phi, add_pseudo(dominators, phi));
	} END_FOR_EACH_PTR(parent);
	return 1;
}

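/*
 * Summary of find_dominating_parents() (added commentary, not part of
 * the original source): each parent block is scanned backwards for a
 * load or store that covers the access.  A covering access becomes a
 * phi source planted just before the parent's terminator and recorded
 * in "dominators"; a parent with no covering access is searched
 * recursively, with the generation counter guarding against revisiting
 * blocks on cyclic paths.
 */
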
/*
 * We should probably sort the phi list just to make it easier to compare
 * later for equality.
 */
void rewrite_load_instruction(struct instruction *insn, struct pseudo_list *dominators)
{
	pseudo_t new, phi;

	/*
	 * Check for somewhat common case of duplicate
	 * phi nodes.
	 */
	new = first_pseudo(dominators)->def->src1;
	FOR_EACH_PTR(dominators, phi) {
		if (new != phi->def->src1)
			goto complex_phi;
		new->ident = new->ident ? : phi->ident;
	} END_FOR_EACH_PTR(phi);

	/*
	 * All the same pseudo - mark the phi-nodes unused
	 * and convert the load into a LNOP and replace the
	 * pseudo.
	 */
	FOR_EACH_PTR(dominators, phi) {
		phi->def->bb = NULL;
	} END_FOR_EACH_PTR(phi);
	convert_load_instruction(insn, new);
	return;

complex_phi:
	/* We leave symbol pseudos with a bogus usage list here */
	if (insn->src->type != PSEUDO_SYM)
		kill_use(&insn->src);
	insn->opcode = OP_PHI;
	insn->phi_list = dominators;
}

static int find_dominating_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, int local)
{
	struct basic_block *bb = insn->bb;
	struct instruction *one, *dom = NULL;
	struct pseudo_list *dominators;
	int partial;

	/* Unreachable load? Undo it */
	if (!bb) {
		insn->opcode = OP_LNOP;
		return 1;
	}

	partial = 0;
	FOR_EACH_PTR(bb->insns, one) {
		int dominance;
		if (one == insn)
			goto found;
		dominance = dominates(pseudo, insn, one, local);
		if (dominance < 0) {
			/* Ignore partial load dominators */
			if (one->opcode == OP_LOAD)
				continue;
			dom = NULL;
			partial = 1;
			continue;
		}
		if (!dominance)
			continue;
		dom = one;
		partial = 0;
	} END_FOR_EACH_PTR(one);
	/* Whaa? */
	warning(pseudo->sym->pos, "unable to find symbol read");
	return 0;
found:
	if (partial)
		return 0;

	if (dom) {
		convert_load_instruction(insn, dom->target);
		return 1;
	}

	/* OK, go find the parents */
	bb->generation = generation;

	dominators = NULL;
	if (!find_dominating_parents(pseudo, insn, bb, generation, &dominators, local))
		return 0;

	/* This happens with initial assignments to structures etc.. */
	if (!dominators) {
		if (!local)
			return 0;
		check_access(insn);
		convert_load_instruction(insn, value_pseudo(0));
		return 1;
	}

	/*
	 * If we find just one dominating instruction, we
	 * can turn it into a direct thing. Otherwise we'll
	 * have to turn the load into a phi-node of the
	 * dominators.
	 */
	rewrite_load_instruction(insn, dominators);
	return 1;
}

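/*
 * Summary of find_dominating_stores() (added commentary, not part of
 * the original source): the load is first checked against earlier
 * instructions in its own block.  A single dominating store lets the
 * load reuse that store's pseudo directly; dominators gathered from
 * parent blocks instead turn the load into a phi-node via
 * rewrite_load_instruction().
 */
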
static void kill_store(struct instruction *insn)
{
	if (insn) {
		insn->bb = NULL;
		insn->opcode = OP_SNOP;
		kill_use(&insn->target);
	}
}

/* Kill a pseudo that is dead on exit from the bb */
static void kill_dead_stores(pseudo_t pseudo, unsigned long generation, struct basic_block *bb, int local)
{
	struct instruction *insn;
	struct basic_block *parent;

	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR_REVERSE(bb->insns, insn) {
		int opcode = insn->opcode;

		if (opcode != OP_LOAD && opcode != OP_STORE) {
			if (local)
				continue;
			if (opcode == OP_CALL)
				return;
			continue;
		}
		if (insn->src == pseudo) {
			if (opcode == OP_LOAD)
				return;
			kill_store(insn);
			continue;
		}
		if (local)
			continue;
		if (insn->src->type != PSEUDO_SYM)
			return;
	} END_FOR_EACH_PTR_REVERSE(insn);

	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dead_stores(pseudo, generation, parent, local);
	} END_FOR_EACH_PTR(parent);
}

/*
 * This should see if the "insn" trivially dominates some previous store, and kill the
 * store if unnecessary.
 */
static void kill_dominated_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, struct basic_block *bb, int local, int found)
{
	struct instruction *one;
	struct basic_block *parent;

	/* Unreachable store? Undo it */
	if (!bb) {
		kill_store(insn);
		return;
	}
	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR_REVERSE(bb->insns, one) {
		int dominance;
		if (!found) {
			if (one != insn)
				continue;
			found = 1;
			continue;
		}
		dominance = dominates(pseudo, insn, one, local);
		if (!dominance)
			continue;
		if (dominance < 0)
			return;
		if (one->opcode == OP_LOAD)
			return;
		kill_store(one);
	} END_FOR_EACH_PTR_REVERSE(one);

	if (!found) {
		warning(bb->pos, "Unable to find instruction");
		return;
	}

	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dominated_stores(pseudo, insn, generation, parent, local, found);
	} END_FOR_EACH_PTR(parent);
}

void check_access(struct instruction *insn)
{
	pseudo_t pseudo = insn->src;

	if (insn->bb && pseudo->type == PSEUDO_SYM) {
		int offset = insn->offset, bit = bytes_to_bits(offset) + insn->size;
		struct symbol *sym = pseudo->sym;

		if (sym->bit_size > 0 && (offset < 0 || bit > sym->bit_size))
			warning(insn->pos, "invalid access %s '%s' (%d %d)",
				offset < 0 ? "below" : "past the end of",
				show_ident(sym->ident), offset,
				bits_to_bytes(sym->bit_size));
	}
}

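/*
 * Example (added commentary, not part of the original source): for
 * "int a[2];", a 32-bit load at byte offset 8 ends at bit 96, which is
 * past the symbol's 64-bit size, so it triggers the "past the end of"
 * warning.
 */
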
static void simplify_one_symbol(struct entrypoint *ep, struct symbol *sym)
{
	pseudo_t pseudo, src;
	struct pseudo_user *pu;
	struct instruction *def;
	unsigned long mod;
	int all, stores, complex;

	/* Never used as a symbol? */
	pseudo = sym->pseudo;
	if (!pseudo)
		return;

	/* We don't do coverage analysis of volatiles.. */
	if (sym->ctype.modifiers & MOD_VOLATILE)
		return;

	/* ..and symbols with external visibility need more care */
	mod = sym->ctype.modifiers & (MOD_NONLOCAL | MOD_STATIC | MOD_ADDRESSABLE);
	if (mod)
		goto external_visibility;

	def = NULL;
	stores = 0;
	complex = 0;
	FOR_EACH_PTR(pseudo->users, pu) {
		/* We know that the symbol-pseudo use is the "src" in the instruction */
		struct instruction *insn = pu->insn;

		switch (insn->opcode) {
		case OP_STORE:
			stores++;
			def = insn;
			break;
		case OP_LOAD:
			break;
		case OP_SYMADDR:
			if (!insn->bb)
				continue;
			mod |= MOD_ADDRESSABLE;
			goto external_visibility;
		case OP_NOP:
		case OP_SNOP:
		case OP_LNOP:
		case OP_PHI:
			continue;
		default:
			warning(sym->pos, "symbol '%s' pseudo used in unexpected way", show_ident(sym->ident));
		}
		complex |= insn->offset;
	} END_FOR_EACH_PTR(pu);

	if (complex)
		goto complex_def;
	if (stores > 1)
		goto multi_def;

	/*
	 * Goodie, we have a single store (if even that) in the whole
	 * thing. Replace all loads with moves from the pseudo,
	 * replace the store with a def.
	 */
	src = VOID;
	if (def)
		src = def->target;

	FOR_EACH_PTR(pseudo->users, pu) {
		struct instruction *insn = pu->insn;
		if (insn->opcode == OP_LOAD) {
			check_access(insn);
			convert_load_instruction(insn, src);
		}
	} END_FOR_EACH_PTR(pu);

	/* Turn the store into a no-op */
	kill_store(def);
	return;

multi_def:
complex_def:
external_visibility:
	all = 1;
	FOR_EACH_PTR_REVERSE(pseudo->users, pu) {
		struct instruction *insn = pu->insn;
		if (insn->opcode == OP_LOAD)
			all &= find_dominating_stores(pseudo, insn, ++bb_generation, !mod);
	} END_FOR_EACH_PTR_REVERSE(pu);

	/* If we converted all the loads, remove the stores. They are dead */
	if (all && !mod) {
		FOR_EACH_PTR(pseudo->users, pu) {
			struct instruction *insn = pu->insn;
			if (insn->opcode == OP_STORE)
				kill_store(insn);
		} END_FOR_EACH_PTR(pu);
	} else {
		/*
		 * If we couldn't take the shortcut, see if we can at least kill some
		 * of them..
		 */
		FOR_EACH_PTR(pseudo->users, pu) {
			struct instruction *insn = pu->insn;
			if (insn->opcode == OP_STORE)
				kill_dominated_stores(pseudo, insn, ++bb_generation, insn->bb, !mod, 0);
		} END_FOR_EACH_PTR(pu);

		if (!(mod & (MOD_NONLOCAL | MOD_STATIC))) {
			struct basic_block *bb;
			FOR_EACH_PTR(ep->bbs, bb) {
				if (!bb->children)
					kill_dead_stores(pseudo, ++bb_generation, bb, !mod);
			} END_FOR_EACH_PTR(bb);
		}
	}

	return;
}

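/*
 * Illustrative sketch of the single-store case above (added commentary,
 * not part of the original source): for a plain local like
 *
 *	int x;
 *	x = a;
 *	return x;
 *
 * there is one store and no aliasing, so the load feeding the return is
 * converted to use the stored pseudo directly and the store becomes a
 * no-op.
 */
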
void simplify_symbol_usage(struct entrypoint *ep)
{
	pseudo_t pseudo;

	FOR_EACH_PTR(ep->accesses, pseudo) {
		simplify_one_symbol(ep, pseudo->sym);
	} END_FOR_EACH_PTR(pseudo);
}

static void mark_bb_reachable(struct basic_block *bb, unsigned long generation)
{
	struct basic_block *child;

	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR(bb->children, child) {
		mark_bb_reachable(child, generation);
	} END_FOR_EACH_PTR(child);
}

static void kill_defs(struct instruction *insn)
{
	pseudo_t target = insn->target;

	if (!has_use_list(target))
		return;
	if (target->def != insn)
		return;

	convert_instruction_target(insn, VOID);
}

void kill_bb(struct basic_block *bb)
{
	struct instruction *insn;
	struct basic_block *child, *parent;

	FOR_EACH_PTR(bb->insns, insn) {
		kill_instruction(insn);
		kill_defs(insn);
		/*
		 * We kill unreachable instructions even if they
		 * otherwise aren't "killable" (e.g. volatile loads)
		 */
		insn->bb = NULL;
	} END_FOR_EACH_PTR(insn);
	bb->insns = NULL;

	FOR_EACH_PTR(bb->children, child) {
		remove_bb_from_list(&child->parents, bb, 0);
	} END_FOR_EACH_PTR(child);
	bb->children = NULL;

	FOR_EACH_PTR(bb->parents, parent) {
		remove_bb_from_list(&parent->children, bb, 0);
	} END_FOR_EACH_PTR(parent);
	bb->parents = NULL;
}

void kill_unreachable_bbs(struct entrypoint *ep)
{
	struct basic_block *bb;
	unsigned long generation = ++bb_generation;

	mark_bb_reachable(ep->entry->bb, generation);
	FOR_EACH_PTR(ep->bbs, bb) {
		if (bb->generation == generation)
			continue;
		/* Mark it as being dead */
		kill_bb(bb);
		bb->ep = NULL;
		DELETE_CURRENT_PTR(bb);
	} END_FOR_EACH_PTR(bb);
	PACK_PTR_LIST(&ep->bbs);
}

static int rewrite_parent_branch(struct basic_block *bb, struct basic_block *old, struct basic_block *new)
{
	int changed = 0;
	struct instruction *insn = last_instruction(bb->insns);

	if (!insn)
		return 0;

	/* Infinite loops: let's not "optimize" them.. */
	if (old == new)
		return 0;

	switch (insn->opcode) {
	case OP_BR:
		changed |= rewrite_branch(bb, &insn->bb_true, old, new);
		changed |= rewrite_branch(bb, &insn->bb_false, old, new);
		assert(changed);
		return changed;
	case OP_SWITCH: {
		struct multijmp *jmp;
		FOR_EACH_PTR(insn->multijmp_list, jmp) {
			changed |= rewrite_branch(bb, &jmp->target, old, new);
		} END_FOR_EACH_PTR(jmp);
		assert(changed);
		return changed;
	}
	default:
		return 0;
	}
}

static struct basic_block * rewrite_branch_bb(struct basic_block *bb, struct instruction *br)
{
	struct basic_block *parent;
	struct basic_block *target = br->bb_true;
	struct basic_block *false = br->bb_false;

	if (target && false) {
		pseudo_t cond = br->cond;
		if (cond->type != PSEUDO_VAL)
			return NULL;
		target = cond->value ? target : false;
	}

	/*
	 * We can't do FOR_EACH_PTR() here, because the parent list
	 * may change when we rewrite the parent.
	 */
	while ((parent = first_basic_block(bb->parents)) != NULL) {
		if (!rewrite_parent_branch(parent, bb, target))
			return NULL;
	}
	return target;
}

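/*
 * Note (added commentary, not part of the original source): a
 * conditional branch whose condition is a known constant degenerates to
 * a single target here, so a block containing nothing but
 * "br 1, bbT, bbF" can have every parent re-pointed at bbT and then be
 * removed by the caller.
 */
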
static void vrfy_bb_in_list(struct basic_block *bb, struct basic_block_list *list)
{
	if (bb) {
		struct basic_block *tmp;
		int no_bb_in_list = 0;

		FOR_EACH_PTR(list, tmp) {
			if (bb == tmp)
				return;
		} END_FOR_EACH_PTR(tmp);
		assert(no_bb_in_list);
	}
}

static void vrfy_parents(struct basic_block *bb)
{
	struct basic_block *tmp;
	FOR_EACH_PTR(bb->parents, tmp) {
		vrfy_bb_in_list(bb, tmp->children);
	} END_FOR_EACH_PTR(tmp);
}

static void vrfy_children(struct basic_block *bb)
{
	struct basic_block *tmp;
	struct instruction *br = last_instruction(bb->insns);

	if (!br) {
		assert(!bb->children);
		return;
	}
	switch (br->opcode) {
		struct multijmp *jmp;
	case OP_BR:
		vrfy_bb_in_list(br->bb_true, bb->children);
		vrfy_bb_in_list(br->bb_false, bb->children);
		break;
	case OP_SWITCH:
	case OP_COMPUTEDGOTO:
		FOR_EACH_PTR(br->multijmp_list, jmp) {
			vrfy_bb_in_list(jmp->target, bb->children);
		} END_FOR_EACH_PTR(jmp);
		break;
	default:
		break;
	}

	FOR_EACH_PTR(bb->children, tmp) {
		vrfy_bb_in_list(bb, tmp->parents);
	} END_FOR_EACH_PTR(tmp);
}

static void vrfy_bb_flow(struct basic_block *bb)
{
	vrfy_children(bb);
	vrfy_parents(bb);
}

void vrfy_flow(struct entrypoint *ep)
{
	struct basic_block *bb;
	struct basic_block *entry = ep->entry->bb;

	FOR_EACH_PTR(ep->bbs, bb) {
		if (bb == entry)
			entry = NULL;
		vrfy_bb_flow(bb);
	} END_FOR_EACH_PTR(bb);
	assert(!entry);
}

void pack_basic_blocks(struct entrypoint *ep)
{
	struct basic_block *bb;

	/* See if we can merge a bb into another one.. */
	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *first, *insn;
		struct basic_block *parent, *child, *last;

		if (!bb_reachable(bb))
			continue;

		/*
		 * Just a branch?
		 */
		FOR_EACH_PTR(bb->insns, first) {
			if (!first->bb)
				continue;
			switch (first->opcode) {
			case OP_NOP: case OP_LNOP: case OP_SNOP:
				continue;
			case OP_BR: {
				struct basic_block *replace;
				replace = rewrite_branch_bb(bb, first);
				if (replace) {
					kill_bb(bb);
					goto no_merge;
				}
			}
			/* fallthrough */
			default:
				goto out;
			}
		} END_FOR_EACH_PTR(first);

out:
		/*
		 * See if we only have one parent..
		 */
		last = NULL;
		FOR_EACH_PTR(bb->parents, parent) {
			if (last) {
				if (last != parent)
					goto no_merge;
				continue;
			}
			last = parent;
		} END_FOR_EACH_PTR(parent);

		parent = last;
		if (!parent || parent == bb)
			continue;

		/*
		 * Goodie. See if the parent can merge..
		 */
		FOR_EACH_PTR(parent->children, child) {
			if (child != bb)
				goto no_merge;
		} END_FOR_EACH_PTR(child);

		/*
		 * Merge the two.
		 */
		repeat_phase |= REPEAT_CSE;

		parent->children = bb->children;
		bb->children = NULL;
		bb->parents = NULL;

		FOR_EACH_PTR(parent->children, child) {
			replace_bb_in_list(&child->parents, bb, parent, 0);
		} END_FOR_EACH_PTR(child);

		kill_instruction(delete_last_instruction(&parent->insns));
		FOR_EACH_PTR(bb->insns, insn) {
			if (insn->bb) {
				assert(insn->bb == bb);
				insn->bb = parent;
			}
			add_instruction(&parent->insns, insn);
		} END_FOR_EACH_PTR(insn);
		bb->insns = NULL;

no_merge:
		/* nothing to do */;
	} END_FOR_EACH_PTR(bb);
}
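
/*
 * Illustrative sketch of the merge in pack_basic_blocks() (added
 * commentary, not part of the original source): when bb2's only parent
 * is bb1 and bb1's only child is bb2, the pair
 *
 *	bb1: ...; br bb2
 *	bb2: ...
 *
 * collapses into one block - bb1 drops its trailing branch and inherits
 * bb2's instructions and children.
 */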