Duh! Remove a very very incorrect left-over flow simplification call.
[smatch.git] / flow.c
blob2eb282cfbcd289916360667cf76a31367b3b2975
1 /*
2 * Flow - walk the linearized flowgraph, simplifying it as we
3 * go along.
5 * Copyright (C) 2004 Linus Torvalds
6 */
8 #include <string.h>
9 #include <stdarg.h>
10 #include <stdlib.h>
11 #include <stdio.h>
12 #include <stddef.h>
13 #include <assert.h>
15 #include "parse.h"
16 #include "expression.h"
17 #include "linearize.h"
18 #include "flow.h"
/* Monotonic visit counter: bumped per traversal so bb->generation marks "seen" */
unsigned long bb_generation;
/*
 * Dammit, if we have a phi-node followed by a conditional
 * branch on that phi-node, we should damn well be able to
 * do something about the source. Maybe.
 */
/*
 * Redirect one branch slot of 'bb' from 'old' to 'new', keeping the
 * child/parent lists consistent. Returns 1 if a rewrite happened.
 */
static int rewrite_branch(struct basic_block *bb,
	struct basic_block **ptr,
	struct basic_block *old,
	struct basic_block *new)
{
	/* Nothing to do if this slot doesn't point at 'old', or it's a no-op */
	if (*ptr != old || new == old)
		return 0;

	/* We might find new if-conversions or non-dominating CSEs */
	repeat_phase |= REPEAT_CSE;
	*ptr = new;
	replace_bb_in_list(&bb->children, old, new, 1);
	remove_bb_from_list(&old->parents, bb, 1);
	add_bb(&new->parents, bb);
	return 1;
}
/*
 * Return the known truth value of a pseudo, or -1 if
 * it's not known.
 */
static int pseudo_truth_value(pseudo_t pseudo)
{
	switch (pseudo->type) {
	case PSEUDO_VAL:
		/* Immediate constant: truth is just non-zero-ness */
		return !!pseudo->value;

	case PSEUDO_REG: {
		struct instruction *insn = pseudo->def;
		/* Only a SETVAL defining this very pseudo gives a known value */
		if (insn->opcode == OP_SETVAL && insn->target == pseudo) {
			struct expression *expr = insn->val;

			/* A symbol address is always considered true.. */
			if (!expr)
				return 1;
			if (expr->type == EXPR_VALUE)
				return !!expr->value;
		}
	}
		/* Fall through */
	default:
		return -1;
	}
}
73 * Does a basic block depend on the pseudos that "src" defines?
75 static int bb_depends_on(struct basic_block *target, struct basic_block *src)
77 pseudo_t pseudo;
79 FOR_EACH_PTR(src->defines, pseudo) {
80 if (pseudo_in_list(target->needs, pseudo))
81 return 1;
82 } END_FOR_EACH_PTR(pseudo);
83 return 0;
/*
 * When we reach here, we have:
 *  - a basic block that ends in a conditional branch and
 *    that has no side effects apart from the pseudos it
 *    may change.
 *  - the phi-node that the conditional branch depends on
 *  - full pseudo liveness information
 *
 * We need to check if any of the _sources_ of the phi-node
 * may be constant, and not actually need this block at all.
 */
static int try_to_simplify_bb(struct basic_block *bb, struct instruction *first, struct instruction *second)
{
	int changed = 0;
	pseudo_t phi;

	/* 'first' is the OP_PHI, 'second' the OP_BR that tests it */
	FOR_EACH_PTR(first->phi_list, phi) {
		struct instruction *def = phi->def;
		struct basic_block *source, *target;
		pseudo_t pseudo;
		struct instruction *br;
		int true;

		if (!def)
			continue;
		source = def->bb;
		pseudo = def->src1;
		if (!pseudo || !source)
			continue;
		/* The source block must itself end in an unconditional-rewritable branch */
		br = last_instruction(source->insns);
		if (!br)
			continue;
		if (br->opcode != OP_BR)
			continue;
		/* Is this phi source a known constant? */
		true = pseudo_truth_value(pseudo);
		if (true < 0)
			continue;
		/* The branch outcome is known: pick the taken successor of 'second' */
		target = true ? second->bb_true : second->bb_false;
		/* Can't skip 'bb' if 'target' consumes values 'bb' computes */
		if (bb_depends_on(target, bb))
			continue;
		changed |= rewrite_branch(source, &br->bb_true, bb, target);
		changed |= rewrite_branch(source, &br->bb_false, bb, target);
	} END_FOR_EACH_PTR(phi);
	return changed;
}
132 static int bb_has_side_effects(struct basic_block *bb)
134 struct instruction *insn;
135 FOR_EACH_PTR(bb->insns, insn) {
136 switch (insn->opcode) {
137 case OP_CALL:
138 case OP_STORE:
139 case OP_CONTEXT:
140 return 1;
141 default:
142 continue;
144 } END_FOR_EACH_PTR(insn);
145 return 0;
/*
 * If the branch condition is a phi-node defined in this very block,
 * and the block is otherwise side-effect free, try to make the phi
 * sources jump straight to the right successor.
 */
static int simplify_phi_branch(struct basic_block *bb, struct instruction *br)
{
	pseudo_t cond = br->cond;
	struct instruction *def;

	if (cond->type != PSEUDO_REG)
		return 0;
	def = cond->def;
	/* The phi must live in this block for the transform to be valid */
	if (def->bb != bb || def->opcode != OP_PHI)
		return 0;
	if (bb_has_side_effects(bb))
		return 0;
	return try_to_simplify_bb(bb, def, br);
}
/*
 * Simplify a conditional branch whose target block ends in another
 * conditional branch on the *same* condition: the second test's
 * outcome is already known on this edge.
 */
static int simplify_branch_branch(struct basic_block *bb, struct instruction *br,
	struct basic_block **target_p, int true)
{
	struct basic_block *target = *target_p, *final;
	struct instruction *insn;
	int retval;

	/* Self-loop: leave alone */
	if (target == bb)
		return 0;
	insn = last_instruction(target->insns);
	if (!insn || insn->opcode != OP_BR || insn->cond != br->cond)
		return 0;
	/*
	 * Ahhah! We've found a branch to a branch on the same conditional!
	 * Now we just need to see if we can rewrite the branch..
	 */
	retval = 0;
	final = true ? insn->bb_true : insn->bb_false;
	/* Can't bypass 'target' if skipping it would change observable state */
	if (bb_has_side_effects(target))
		goto try_to_rewrite_target;
	if (bb_depends_on(final, target))
		goto try_to_rewrite_target;
	retval = rewrite_branch(bb, target_p, target, final);

try_to_rewrite_target:
	/*
	 * If we're the only parent, at least we can rewrite the
	 * now-known second branch.
	 */
	if (bb_list_size(target->parents) != 1)
		return retval;
	insert_branch(target, insn, final);
	return 1;
}
/*
 * Try the phi-based simplification first; otherwise attempt the
 * branch-to-branch rewrite on both successors. Note the bitwise '|':
 * both sides must be evaluated even if the first one changed something.
 */
static int simplify_one_branch(struct basic_block *bb, struct instruction *br)
{
	if (simplify_phi_branch(bb, br))
		return 1;
	return simplify_branch_branch(bb, br, &br->bb_true, 1) |
	       simplify_branch_branch(bb, br, &br->bb_false, 0);
}
/*
 * Walk all basic blocks and simplify the ones ending in a
 * conditional branch (an OP_BR with a bb_false successor).
 * Returns non-zero if anything changed.
 */
static int simplify_branch_nodes(struct entrypoint *ep)
{
	int changed = 0;
	struct basic_block *bb;

	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *br = last_instruction(bb->insns);

		/* Only conditional branches are interesting here */
		if (!br || br->opcode != OP_BR || !br->bb_false)
			continue;
		changed |= simplify_one_branch(bb, br);
	} END_FOR_EACH_PTR(bb);
	return changed;
}
/*
 * This is called late - when we have intra-bb liveness information..
 */
int simplify_flow(struct entrypoint *ep)
{
	return simplify_branch_nodes(ep);
}
/* Append the 'src' user list onto '*dst' (thin typed wrapper over ptrlist) */
static inline void concat_user_list(struct pseudo_ptr_list *src, struct pseudo_ptr_list **dst)
{
	concat_ptr_list((struct ptr_list *)src, (struct ptr_list **)dst);
}
/*
 * Replace every use of 'insn's target pseudo with 'src', then move
 * the (now rewritten) user list over to 'src'.
 */
void convert_instruction_target(struct instruction *insn, pseudo_t src)
{
	pseudo_t target, *usep;

	/*
	 * Go through the "insn->users" list and replace them all..
	 */
	target = insn->target;
	if (target == src)
		return;
	FOR_EACH_PTR(target->users, usep) {
		/* VOID slots are dead uses; leave them alone */
		if (*usep != VOID) {
			assert(*usep == target);
			*usep = src;
		}
	} END_FOR_EACH_PTR(usep);
	/* Hand the whole user list over to the replacement pseudo */
	concat_user_list(target->users, &src->users);
	target->users = NULL;
}
254 void convert_load_instruction(struct instruction *insn, pseudo_t src)
256 convert_instruction_target(insn, src);
257 /* Turn the load into a no-op */
258 insn->opcode = OP_LNOP;
259 insn->bb = NULL;
262 static int overlapping_memop(struct instruction *a, struct instruction *b)
264 unsigned int a_start = a->offset << 3;
265 unsigned int b_start = b->offset << 3;
266 unsigned int a_size = a->size;
267 unsigned int b_size = b->size;
269 if (a_size + a_start <= b_start)
270 return 0;
271 if (b_size + b_start <= a_start)
272 return 0;
273 return 1;
276 static inline int same_memop(struct instruction *a, struct instruction *b)
278 return a->offset == b->offset && a->size == b->size;
/*
 * Return 1 if "one" dominates the access to 'pseudo'
 * in insn.
 *
 * Return 0 if it doesn't, and -1 if you don't know.
 */
int dominates(pseudo_t pseudo, struct instruction *insn, struct instruction *dom, int local)
{
	int opcode = dom->opcode;

	/* A call may clobber any non-local memory: unknown unless 'local' */
	if (opcode == OP_CALL)
		return local ? 0 : -1;
	if (opcode != OP_LOAD && opcode != OP_STORE)
		return 0;
	if (dom->src != pseudo) {
		if (local)
			return 0;
		/* We don't think two explicitly different symbols ever alias */
		if (dom->src->type == PSEUDO_SYM)
			return 0;
		/* We could try to do some alias analysis here */
		return -1;
	}
	if (!same_memop(insn, dom)) {
		/* A differently-shaped load never dominates */
		if (dom->opcode == OP_LOAD)
			return 0;
		if (!overlapping_memop(insn, dom))
			return 0;
		/* Partial overlap with a store: can't decide */
		return -1;
	}
	return 1;
}
/*
 * Walk up through the parents of 'bb' looking for instructions that
 * dominate the access in 'insn'. Each dominator found becomes a
 * phi-source appended to '*dominators'. Returns 0 on failure
 * (an ambiguous dominator was hit), 1 otherwise.
 */
static int find_dominating_parents(pseudo_t pseudo, struct instruction *insn,
	struct basic_block *bb, unsigned long generation, struct pseudo_list **dominators,
	int local, int loads)
{
	struct basic_block *parent;

	/* Entry block: a local symbol is implicitly "defined" (uninitialized) */
	if (!bb->parents)
		return !!local;

	/* With multiple parents a load can't serve as a unique dominator */
	if (bb_list_size(bb->parents) > 1)
		loads = 0;
	FOR_EACH_PTR(bb->parents, parent) {
		struct instruction *one;
		struct instruction *br;
		pseudo_t phi;

		/* Scan the parent bottom-up for a dominating access */
		FOR_EACH_PTR_REVERSE(parent->insns, one) {
			int dominance;
			if (one == insn)
				goto no_dominance;
			dominance = dominates(pseudo, insn, one, local);
			if (dominance < 0) {
				/* Ambiguous loads can be skipped; anything else aborts */
				if (one->opcode == OP_LOAD)
					continue;
				return 0;
			}
			if (!dominance)
				continue;
			if (one->opcode == OP_LOAD && !loads)
				continue;
			goto found_dominator;
		} END_FOR_EACH_PTR_REVERSE(one);
no_dominance:
		/* Already visited this parent in the current traversal? */
		if (parent->generation == generation)
			continue;
		parent->generation = generation;

		/* Nothing here: recurse further up the CFG */
		if (!find_dominating_parents(pseudo, insn, parent, generation, dominators, local, loads))
			return 0;
		continue;

found_dominator:
		/* Insert a phi-source just before the parent's terminator */
		br = delete_last_instruction(&parent->insns);
		phi = alloc_phi(parent, one->target, one->size);
		phi->ident = phi->ident ? : pseudo->ident;
		add_instruction(&parent->insns, br);
		use_pseudo(phi, add_pseudo(dominators, phi));
	} END_FOR_EACH_PTR(parent);
	return 1;
}
/*
 * We should probably sort the phi list just to make it easier to compare
 * later for equality.
 */
/*
 * Turn a load into either a direct use of a single pseudo (when all
 * phi-sources agree) or a full OP_PHI over 'dominators'.
 */
void rewrite_load_instruction(struct instruction *insn, struct pseudo_list *dominators)
{
	pseudo_t new, phi;

	/*
	 * Check for somewhat common case of duplicate
	 * phi nodes.
	 */
	new = first_pseudo(dominators)->def->src1;
	FOR_EACH_PTR(dominators, phi) {
		if (new != phi->def->src1)
			goto complex_phi;
		new->ident = new->ident ? : phi->ident;
	} END_FOR_EACH_PTR(phi);

	/*
	 * All the same pseudo - mark the phi-nodes unused
	 * and convert the load into a LNOP and replace the
	 * pseudo.
	 */
	FOR_EACH_PTR(dominators, phi) {
		phi->def->bb = NULL;
	} END_FOR_EACH_PTR(phi);
	convert_load_instruction(insn, new);
	return;

complex_phi:
	/* We leave symbol pseudos with a bogus usage list here */
	if (insn->src->type != PSEUDO_SYM)
		kill_use(&insn->src);
	insn->opcode = OP_PHI;
	insn->phi_list = dominators;
}
/*
 * Try to resolve a load of 'pseudo' to a dominating store (or, across
 * basic blocks, to a phi of dominating stores). Returns 1 when the
 * load was converted, 0 when it must stay.
 */
static int find_dominating_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, int local)
{
	struct basic_block *bb = insn->bb;
	struct instruction *one, *dom = NULL;
	struct pseudo_list *dominators;
	int partial;

	/* Unreachable load? Undo it */
	if (!bb) {
		insn->opcode = OP_LNOP;
		return 1;
	}

	/* First scan this block, top-down, for the closest dominator */
	partial = 0;
	FOR_EACH_PTR(bb->insns, one) {
		int dominance;
		if (one == insn)
			goto found;
		dominance = dominates(pseudo, insn, one, local);
		if (dominance < 0) {
			/* Ignore partial load dominators */
			if (one->opcode == OP_LOAD)
				continue;
			dom = NULL;
			partial = 1;
			continue;
		}
		if (!dominance)
			continue;
		dom = one;
		partial = 0;
	} END_FOR_EACH_PTR(one);
	/* Whaa? */
	warning(pseudo->sym->pos, "unable to find symbol read");
	return 0;
found:
	/* An unresolved partial overlap blocks the conversion */
	if (partial)
		return 0;

	if (dom) {
		convert_load_instruction(insn, dom->target);
		return 1;
	}

	/* Ok, go find the parents */
	bb->generation = generation;

	dominators = NULL;
	if (!find_dominating_parents(pseudo, insn, bb, generation, &dominators, local, 1))
		return 0;

	/* This happens with initial assignments to structures etc.. */
	if (!dominators) {
		if (!local)
			return 0;
		convert_load_instruction(insn, value_pseudo(0));
		return 1;
	}

	/*
	 * If we find just one dominating instruction, we
	 * can turn it into a direct thing. Otherwise we'll
	 * have to turn the load into a phi-node of the
	 * dominators.
	 */
	rewrite_load_instruction(insn, dominators);
	return 1;
}
473 static void kill_store(struct instruction *insn)
475 if (insn) {
476 insn->bb = NULL;
477 insn->opcode = OP_SNOP;
478 kill_use(&insn->target);
/* Kill a pseudo that is dead on exit from the bb */
static void kill_dead_stores(pseudo_t pseudo, unsigned long generation, struct basic_block *bb, int local)
{
	struct instruction *insn;
	struct basic_block *parent;

	/* Generation marker prevents revisiting blocks in this walk */
	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR_REVERSE(bb->insns, insn) {
		int opcode = insn->opcode;

		if (opcode != OP_LOAD && opcode != OP_STORE) {
			if (local)
				continue;
			/* A call may read the non-local pseudo: stores above it are live */
			if (opcode == OP_CALL)
				return;
			continue;
		}
		if (insn->src == pseudo) {
			/* A load makes the value live again; stop */
			if (opcode == OP_LOAD)
				return;
			kill_store(insn);
			continue;
		}
		if (local)
			continue;
		/* Unknown (non-symbol) access may alias: give up */
		if (insn->src->type != PSEUDO_SYM)
			return;
	} END_FOR_EACH_PTR_REVERSE(insn);

	/* Recurse into parents, but only along single-exit edges */
	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dead_stores(pseudo, generation, parent, local);
	} END_FOR_EACH_PTR(parent);
}
/*
 * This should see if the "insn" trivially dominates some previous store, and kill the
 * store if unnecessary.
 */
static void kill_dominated_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, struct basic_block *bb, int local, int found)
{
	struct instruction *one;
	struct basic_block *parent;

	/* Unreachable store? Undo it */
	if (!bb) {
		kill_store(insn);
		return;
	}
	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR_REVERSE(bb->insns, one) {
		int dominance;
		/* First locate 'insn' itself; only instructions above it matter */
		if (!found) {
			if (one != insn)
				continue;
			found = 1;
			continue;
		}
		dominance = dominates(pseudo, insn, one, local);
		if (!dominance)
			continue;
		/* Ambiguous overlap: can't prove the earlier store dead */
		if (dominance < 0)
			return;
		/* An intervening load keeps earlier stores live */
		if (one->opcode == OP_LOAD)
			return;
		kill_store(one);
	} END_FOR_EACH_PTR_REVERSE(one);

	if (!found) {
		warning(bb->pos, "Unable to find instruction");
		return;
	}

	/* Continue upward only through parents whose sole child is 'bb' */
	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dominated_stores(pseudo, insn, generation, parent, local, found);
	} END_FOR_EACH_PTR(parent);
}
/*
 * Simplify all accesses to one symbol: in the easy case (single store,
 * no aliasing concerns) rewrite loads directly; otherwise fall back to
 * dominating-store analysis and dead-store elimination.
 */
static void simplify_one_symbol(struct entrypoint *ep, struct symbol *sym)
{
	pseudo_t pseudo, src, *pp;
	struct instruction *def;
	unsigned long mod;
	int all, stores, complex;

	/* Never used as a symbol? */
	pseudo = sym->pseudo;
	if (!pseudo)
		return;

	/* We don't do coverage analysis of volatiles.. */
	if (sym->ctype.modifiers & MOD_VOLATILE)
		return;

	/* ..and symbols with external visibility need more care */
	mod = sym->ctype.modifiers & (MOD_NONLOCAL | MOD_STATIC | MOD_ADDRESSABLE);
	if (mod)
		goto external_visibility;

	def = NULL;
	stores = 0;
	complex = 0;
	FOR_EACH_PTR(pseudo->users, pp) {
		/* We know that the symbol-pseudo use is the "src" in the instruction */
		struct instruction *insn = container(pp, struct instruction, src);

		switch (insn->opcode) {
		case OP_STORE:
			stores++;
			def = insn;
			break;
		case OP_LOAD:
			break;
		case OP_SETVAL:
			/* Taking the address makes the symbol addressable */
			if (!insn->bb)
				continue;
			mod |= MOD_ADDRESSABLE;
			goto external_visibility;
		case OP_NOP:
		case OP_SNOP:
		case OP_LNOP:
		case OP_PHI:
			continue;
		default:
			warning(sym->pos, "symbol '%s' pseudo used in unexpected way", show_ident(sym->ident));
		}
		/* Any non-zero offset access means partial accesses: "complex" */
		complex |= insn->offset;
	} END_FOR_EACH_PTR(pp);

	if (complex)
		goto complex_def;
	if (stores > 1)
		goto multi_def;

	/*
	 * Goodie, we have a single store (if even that) in the whole
	 * thing. Replace all loads with moves from the pseudo,
	 * replace the store with a def.
	 */
	src = VOID;
	if (def)
		src = def->target;

	FOR_EACH_PTR(pseudo->users, pp) {
		struct instruction *insn = container(pp, struct instruction, src);
		if (insn->opcode == OP_LOAD)
			convert_load_instruction(insn, src);
	} END_FOR_EACH_PTR(pp);

	/* Turn the store into a no-op */
	kill_store(def);
	return;

multi_def:
complex_def:
external_visibility:
	all = 1;
	FOR_EACH_PTR_REVERSE(pseudo->users, pp) {
		struct instruction *insn = container(pp, struct instruction, src);
		if (insn->opcode == OP_LOAD)
			all &= find_dominating_stores(pseudo, insn, ++bb_generation, !mod);
	} END_FOR_EACH_PTR_REVERSE(pp);

	/* If we converted all the loads, remove the stores. They are dead */
	if (all && !mod) {
		FOR_EACH_PTR(pseudo->users, pp) {
			struct instruction *insn = container(pp, struct instruction, src);
			if (insn->opcode == OP_STORE)
				kill_store(insn);
		} END_FOR_EACH_PTR(pp);
	} else {
		/*
		 * If we couldn't take the shortcut, see if we can at least kill some
		 * of them..
		 */
		FOR_EACH_PTR(pseudo->users, pp) {
			struct instruction *insn = container(pp, struct instruction, src);
			if (insn->opcode == OP_STORE)
				kill_dominated_stores(pseudo, insn, ++bb_generation, insn->bb, !mod, 0);
		} END_FOR_EACH_PTR(pp);

		/* Stores dead at function exit can go too, for non-global symbols */
		if (!(mod & (MOD_NONLOCAL | MOD_STATIC))) {
			struct basic_block *bb;
			FOR_EACH_PTR(ep->bbs, bb) {
				if (!bb->children)
					kill_dead_stores(pseudo, ++bb_generation, bb, !mod);
			} END_FOR_EACH_PTR(bb);
		}
	}

	return;
}
689 void simplify_symbol_usage(struct entrypoint *ep)
691 struct symbol *sym;
693 FOR_EACH_PTR(ep->accesses, sym) {
694 simplify_one_symbol(ep, sym);
695 } END_FOR_EACH_PTR(sym);
698 static void mark_bb_reachable(struct basic_block *bb, unsigned long generation)
700 struct basic_block *child;
702 if (bb->generation == generation)
703 return;
704 bb->generation = generation;
705 FOR_EACH_PTR(bb->children, child) {
706 mark_bb_reachable(child, generation);
707 } END_FOR_EACH_PTR(child);
710 static void kill_defs(struct instruction *insn)
712 pseudo_t target = insn->target;
714 if (!has_use_list(target))
715 return;
716 if (target->def != insn)
717 return;
719 convert_instruction_target(insn, VOID);
/*
 * Delete a basic block entirely: kill its instructions and their
 * definitions, then unlink it from both child and parent lists.
 */
void kill_bb(struct basic_block *bb)
{
	struct instruction *insn;
	struct basic_block *child, *parent;

	FOR_EACH_PTR(bb->insns, insn) {
		kill_instruction(insn);
		kill_defs(insn);
		/*
		 * We kill unreachable instructions even if they
		 * otherwise aren't "killable". Eg volatile loads
		 * etc.
		 */
		insn->bb = NULL;
	} END_FOR_EACH_PTR(insn);
	bb->insns = NULL;

	FOR_EACH_PTR(bb->children, child) {
		remove_bb_from_list(&child->parents, bb, 0);
	} END_FOR_EACH_PTR(child);
	bb->children = NULL;

	FOR_EACH_PTR(bb->parents, parent) {
		remove_bb_from_list(&parent->children, bb, 0);
	} END_FOR_EACH_PTR(parent);
	bb->parents = NULL;
}
/*
 * Mark everything reachable from the entry block, then kill every
 * block the marking pass did not touch.
 */
void kill_unreachable_bbs(struct entrypoint *ep)
{
	struct basic_block *bb;
	unsigned long generation = ++bb_generation;

	mark_bb_reachable(ep->entry, generation);
	FOR_EACH_PTR(ep->bbs, bb) {
		if (bb->generation == generation)
			continue;
		/* Mark it as being dead */
		kill_bb(bb);
	} END_FOR_EACH_PTR(bb);
}
/*
 * Rewrite the terminator of 'bb' so that edges to 'old' go to 'new'
 * instead. Returns 1 if the terminator was a rewritable kind.
 */
static int rewrite_parent_branch(struct basic_block *bb, struct basic_block *old, struct basic_block *new)
{
	struct instruction *insn = last_instruction(bb->insns);

	if (!insn)
		return 0;

	/* Infinite loops: let's not "optimize" them.. */
	if (old == new)
		return 0;

	switch (insn->opcode) {
	case OP_BR:
		rewrite_branch(bb, &insn->bb_true, old, new);
		rewrite_branch(bb, &insn->bb_false, old, new);

		/* Conditional branch to same target? */
		if (insn->bb_true == insn->bb_false) {
			/* Collapse to unconditional: drop the duplicate edge and the cond */
			remove_bb_from_list(&new->parents, bb, 1);
			remove_bb_from_list(&bb->children, new, 1);
			insn->bb_false = NULL;
			kill_use(&insn->cond);
		}
		return 1;
	case OP_SWITCH: {
		struct multijmp *jmp;
		FOR_EACH_PTR(insn->multijmp_list, jmp) {
			rewrite_branch(bb, &jmp->target, old, new);
		} END_FOR_EACH_PTR(jmp);
		return 1;
	}
	default:
		return 0;
	}
}
/*
 * 'bb' contains nothing but a branch: forward every parent straight to
 * the branch's (possibly constant-folded) target. Returns the target
 * on success, NULL if any parent couldn't be rewritten.
 */
static struct basic_block * rewrite_branch_bb(struct basic_block *bb, struct instruction *br)
{
	struct basic_block *parent;
	struct basic_block *target = br->bb_true;
	struct basic_block *false = br->bb_false;

	if (target && false) {
		/* Conditional branch: only foldable when the cond is a constant */
		pseudo_t cond = br->cond;
		if (cond->type != PSEUDO_VAL)
			return NULL;
		target = cond->value ? target : false;
	}

	/*
	 * We can't do FOR_EACH_PTR() here, because the parent list
	 * may change when we rewrite the parent.
	 */
	while ((parent = first_basic_block(bb->parents)) != NULL) {
		if (!rewrite_parent_branch(parent, bb, target))
			return NULL;
	}
	return target;
}
/* Assert that a (non-NULL) bb appears in 'list'; used by the CFG verifier */
static void vrfy_bb_in_list(struct basic_block *bb, struct basic_block_list *list)
{
	if (bb) {
		struct basic_block *tmp;
		/* Named so the assert failure message reads meaningfully */
		int no_bb_in_list = 0;

		FOR_EACH_PTR(list, tmp) {
			if (bb == tmp)
				return;
		} END_FOR_EACH_PTR(tmp);
		assert(no_bb_in_list);
	}
}
838 static void vrfy_parents(struct basic_block *bb)
840 struct basic_block *tmp;
841 FOR_EACH_PTR(bb->parents, tmp) {
842 vrfy_bb_in_list(bb, tmp->children);
843 } END_FOR_EACH_PTR(tmp);
/*
 * Check that the block's terminator targets all appear in its child
 * list, and that every child lists this block as a parent.
 */
static void vrfy_children(struct basic_block *bb)
{
	struct basic_block *tmp;
	struct instruction *br = last_instruction(bb->insns);

	if (!br) {
		assert(!bb->children);
		return;
	}
	switch (br->opcode) {
		struct multijmp *jmp;
	case OP_BR:
		vrfy_bb_in_list(br->bb_true, bb->children);
		vrfy_bb_in_list(br->bb_false, bb->children);
		break;
	case OP_SWITCH:
	case OP_COMPUTEDGOTO:
		FOR_EACH_PTR(br->multijmp_list, jmp) {
			vrfy_bb_in_list(jmp->target, bb->children);
		} END_FOR_EACH_PTR(jmp);
		break;
	default:
		break;
	}

	FOR_EACH_PTR(bb->children, tmp) {
		vrfy_bb_in_list(bb, tmp->parents);
	} END_FOR_EACH_PTR(tmp);
}
/* Verify both directions of one block's CFG linkage */
static void vrfy_bb_flow(struct basic_block *bb)
{
	vrfy_children(bb);
	vrfy_parents(bb);
}
/*
 * Verify the whole flowgraph: every block's links are consistent, and
 * the entry block actually appears in ep->bbs.
 */
void vrfy_flow(struct entrypoint *ep)
{
	struct basic_block *bb;
	struct basic_block *entry = ep->entry;

	FOR_EACH_PTR(ep->bbs, bb) {
		/* Clearing 'entry' records that we saw it in the list */
		if (bb == entry)
			entry = NULL;
		vrfy_bb_flow(bb);
	} END_FOR_EACH_PTR(bb);
	assert(!entry);
}
/*
 * Pack the flowgraph: forward branch-only blocks past themselves, and
 * merge a block into its sole parent when that parent has it as its
 * only child.
 */
void pack_basic_blocks(struct entrypoint *ep)
{
	struct basic_block *bb;

	/* See if we can merge a bb into another one.. */
	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *first, *insn;
		struct basic_block *parent, *child, *last;

		if (!bb_reachable(bb))
			continue;

		/*
		 * Just a branch?
		 */
		FOR_EACH_PTR(bb->insns, first) {
			if (!first->bb)
				continue;
			switch (first->opcode) {
			case OP_NOP: case OP_LNOP: case OP_SNOP:
				continue;
			case OP_BR: {
				struct basic_block *replace;
				replace = rewrite_branch_bb(bb, first);
				if (replace) {
					/* Entry block was forwarded: update the entry pointer */
					if (bb == ep->entry)
						ep->entry = replace;
					kill_bb(bb);
					goto no_merge;
				}
			}
			/* fallthrough */
			default:
				goto out;
			}
		} END_FOR_EACH_PTR(first);

out:
		if (ep->entry == bb)
			continue;

		/*
		 * See if we only have one parent..
		 */
		last = NULL;
		FOR_EACH_PTR(bb->parents, parent) {
			if (last) {
				if (last != parent)
					goto no_merge;
				continue;
			}
			last = parent;
		} END_FOR_EACH_PTR(parent);

		parent = last;
		if (!parent || parent == bb)
			continue;

		/*
		 * Goodie. See if the parent can merge..
		 */
		FOR_EACH_PTR(parent->children, child) {
			if (child != bb)
				goto no_merge;
		} END_FOR_EACH_PTR(child);

		/*
		 * Merge the two.
		 */
		repeat_phase |= REPEAT_CSE;

		/*
		 * But don't allow phi-source merges after this.
		 * FIXME, FIXME! I really need to think about this.
		 * Is it true? I think it's ok to merge phi-sources,
		 * as long as we keep their relative position in
		 * the stream. It's the re-ordering we can't have.
		 * I think.
		 */
		merge_phi_sources = 0;

		/* The parent adopts bb's children.. */
		parent->children = bb->children;
		bb->children = NULL;
		bb->parents = NULL;

		FOR_EACH_PTR(parent->children, child) {
			replace_bb_in_list(&child->parents, bb, parent, 0);
		} END_FOR_EACH_PTR(child);

		/* ..drop the parent's terminator, and append bb's instructions */
		delete_last_instruction(&parent->insns);
		FOR_EACH_PTR(bb->insns, insn) {
			if (insn->bb) {
				assert(insn->bb == bb);
				insn->bb = parent;
			}
			add_instruction(&parent->insns, insn);
		} END_FOR_EACH_PTR(insn);
		bb->insns = NULL;

	no_merge:
		/* nothing to do */;
	} END_FOR_EACH_PTR(bb);
}