/*
 * Flow - walk the linearized flowgraph, simplifying it as we
 * go along.
 *
 * Copyright (C) 2004 Linus Torvalds
 */

#include <string.h>
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <stddef.h>
#include <assert.h>

#include "parse.h"
#include "expression.h"
#include "linearize.h"
#include "flow.h"

unsigned long bb_generation;

/*
 * Dammit, if we have a phi-node followed by a conditional
 * branch on that phi-node, we should damn well be able to
 * do something about the source. Maybe.
 */
static int rewrite_branch(struct basic_block *bb,
	struct basic_block **ptr,
	struct basic_block *old,
	struct basic_block *new)
{
	if (*ptr != old || new == old)
		return 0;

	/* We might find new if-conversions or non-dominating CSEs */
	repeat_phase |= REPEAT_CSE;
	*ptr = new;
	replace_bb_in_list(&bb->children, old, new, 1);
	remove_bb_from_list(&old->parents, bb, 1);
	add_bb(&new->parents, bb);
	return 1;
}

/*
 * Return the known truth value of a pseudo, or -1 if
 * it's not known.
 */
static int pseudo_truth_value(pseudo_t pseudo)
{
	switch (pseudo->type) {
	case PSEUDO_VAL:
		return !!pseudo->value;

	case PSEUDO_REG: {
		struct instruction *insn = pseudo->def;

		/* A symbol address is always considered true.. */
		if (insn->opcode == OP_SYMADDR && insn->target == pseudo)
			return 1;
	}
		/* Fall through */
	default:
		return -1;
	}
}

/*
 * Does a basic block depend on the pseudos that "src" defines?
 */
static int bb_depends_on(struct basic_block *target, struct basic_block *src)
{
	pseudo_t pseudo;

	FOR_EACH_PTR(src->defines, pseudo) {
		if (pseudo_in_list(target->needs, pseudo))
			return 1;
	} END_FOR_EACH_PTR(pseudo);
	return 0;
}

/*
 * When we reach here, we have:
 *  - a basic block that ends in a conditional branch and
 *    that has no side effects apart from the pseudos it
 *    may change.
 *  - the phi-node that the conditional branch depends on
 *  - full pseudo liveness information
 *
 * We need to check if any of the _sources_ of the phi-node
 * may be constant, and not actually need this block at all.
 */
static int try_to_simplify_bb(struct basic_block *bb, struct instruction *first, struct instruction *second)
{
	int changed = 0;
	pseudo_t phi;

	FOR_EACH_PTR(first->phi_list, phi) {
		struct instruction *def = phi->def;
		struct basic_block *source, *target;
		pseudo_t pseudo;
		struct instruction *br;
		int true;

		if (!def)
			continue;
		source = def->bb;
		pseudo = def->src1;
		if (!pseudo || !source)
			continue;
		br = last_instruction(source->insns);
		if (!br)
			continue;
		if (br->opcode != OP_BR)
			continue;
		true = pseudo_truth_value(pseudo);
		if (true < 0)
			continue;
		target = true ? second->bb_true : second->bb_false;
		if (bb_depends_on(target, bb))
			continue;
		changed |= rewrite_branch(source, &br->bb_true, bb, target);
		changed |= rewrite_branch(source, &br->bb_false, bb, target);
	} END_FOR_EACH_PTR(phi);
	return changed;
}

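/*
 * Does this basic block contain anything with side effects beyond
 * the pseudos it assigns - calls, stores, context changes or asm?
 */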
static int bb_has_side_effects(struct basic_block *bb)
{
	struct instruction *insn;
	FOR_EACH_PTR(bb->insns, insn) {
		switch (insn->opcode) {
		case OP_CALL:
			/* FIXME! This should take "const" etc into account */
			return 1;

		case OP_STORE:
		case OP_CONTEXT:
			return 1;

		case OP_ASM:
			/* FIXME! This should take "volatile" etc into account */
			return 1;

		default:
			continue;
		}
	} END_FOR_EACH_PTR(insn);
	return 0;
}

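/*
 * A conditional branch on a phi-node that is defined in the same
 * basic block: if the block is free of side effects, try to let the
 * phi sources bypass this block entirely (see try_to_simplify_bb).
 */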
static int simplify_phi_branch(struct basic_block *bb, struct instruction *br)
{
	pseudo_t cond = br->cond;
	struct instruction *def;

	if (cond->type != PSEUDO_REG)
		return 0;
	def = cond->def;
	if (def->bb != bb || def->opcode != OP_PHI)
		return 0;
	if (bb_has_side_effects(bb))
		return 0;
	return try_to_simplify_bb(bb, def, br);
}

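/*
 * A branch to a block that itself ends in a branch on the same
 * condition: the second test's outcome is already known, so the
 * first branch can often be redirected to the final destination.
 */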
static int simplify_branch_branch(struct basic_block *bb, struct instruction *br,
	struct basic_block **target_p, int true)
{
	struct basic_block *target = *target_p, *final;
	struct instruction *insn;
	int retval;

	if (target == bb)
		return 0;
	insn = last_instruction(target->insns);
	if (!insn || insn->opcode != OP_BR || insn->cond != br->cond)
		return 0;
	/*
	 * Ahhah! We've found a branch to a branch on the same conditional!
	 * Now we just need to see if we can rewrite the branch..
	 */
	retval = 0;
	final = true ? insn->bb_true : insn->bb_false;
	if (bb_has_side_effects(target))
		goto try_to_rewrite_target;
	if (bb_depends_on(final, target))
		goto try_to_rewrite_target;
	return rewrite_branch(bb, target_p, target, final);

try_to_rewrite_target:
	/*
	 * If we're the only parent, at least we can rewrite the
	 * now-known second branch.
	 */
	if (bb_list_size(target->parents) != 1)
		return retval;
	insert_branch(target, insn, final);
	kill_instruction(insn);
	return 1;
}

static int simplify_one_branch(struct basic_block *bb, struct instruction *br)
{
	if (simplify_phi_branch(bb, br))
		return 1;
	return simplify_branch_branch(bb, br, &br->bb_true, 1) |
	       simplify_branch_branch(bb, br, &br->bb_false, 0);
}

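/*
 * Walk all basic blocks and try to simplify each one that ends
 * in a two-way conditional branch.
 */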
static int simplify_branch_nodes(struct entrypoint *ep)
{
	int changed = 0;
	struct basic_block *bb;

	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *br = last_instruction(bb->insns);

		if (!br || br->opcode != OP_BR || !br->bb_false)
			continue;
		changed |= simplify_one_branch(bb, br);
	} END_FOR_EACH_PTR(bb);
	return changed;
}

/*
 * This is called late - when we have intra-bb liveness information..
 */
int simplify_flow(struct entrypoint *ep)
{
	return simplify_branch_nodes(ep);
}

static inline void concat_user_list(struct pseudo_user_list *src, struct pseudo_user_list **dst)
{
	concat_ptr_list((struct ptr_list *)src, (struct ptr_list **)dst);
}

void convert_instruction_target(struct instruction *insn, pseudo_t src)
{
	pseudo_t target;
	struct pseudo_user *pu;
	/*
	 * Go through the "insn->users" list and replace them all..
	 */
	target = insn->target;
	if (target == src)
		return;
	FOR_EACH_PTR(target->users, pu) {
		if (*pu->userp != VOID) {
			assert(*pu->userp == target);
			*pu->userp = src;
		}
	} END_FOR_EACH_PTR(pu);
	concat_user_list(target->users, &src->users);
	target->users = NULL;
}

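/*
 * Replace the loaded value with "src" everywhere and
 * neutralize the load itself.
 */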
void convert_load_instruction(struct instruction *insn, pseudo_t src)
{
	convert_instruction_target(insn, src);
	/* Turn the load into a no-op */
	insn->opcode = OP_LNOP;
	insn->bb = NULL;
}

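/*
 * Do two accesses to the same pseudo overlap? Note that the
 * offsets are in bytes (hence the shift by 3) while the sizes
 * are in bits.
 */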
static int overlapping_memop(struct instruction *a, struct instruction *b)
{
	unsigned int a_start = a->offset << 3;
	unsigned int b_start = b->offset << 3;
	unsigned int a_size = a->size;
	unsigned int b_size = b->size;

	if (a_size + a_start <= b_start)
		return 0;
	if (b_size + b_start <= a_start)
		return 0;
	return 1;
}

static inline int same_memop(struct instruction *a, struct instruction *b)
{
	return a->offset == b->offset && a->size == b->size;
}

/*
 * Return 1 if "dom" dominates the access to "pseudo"
 * in "insn".
 *
 * Return 0 if it doesn't, and -1 if you don't know.
 */
int dominates(pseudo_t pseudo, struct instruction *insn, struct instruction *dom, int local)
{
	int opcode = dom->opcode;

	if (opcode == OP_CALL || opcode == OP_ENTRY)
		return local ? 0 : -1;
	if (opcode != OP_LOAD && opcode != OP_STORE)
		return 0;
	if (dom->src != pseudo) {
		if (local)
			return 0;
		/* We don't think two explicitly different symbols ever alias */
		if (dom->src->type == PSEUDO_SYM)
			return 0;
		/* We could try to do some alias analysis here */
		return -1;
	}
	if (!same_memop(insn, dom)) {
		if (dom->opcode == OP_LOAD)
			return 0;
		if (!overlapping_memop(insn, dom))
			return 0;
		return -1;
	}
	return 1;
}

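/*
 * Recursively walk the parents of "bb" looking for an instruction
 * that dominates the memory access "insn". Each dominating value
 * found becomes a phi source for the access; parents without a
 * dominator are searched further up. Returns 0 if domination
 * could not be established.
 */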
static int find_dominating_parents(pseudo_t pseudo, struct instruction *insn,
	struct basic_block *bb, unsigned long generation, struct pseudo_list **dominators,
	int local, int loads)
{
	struct basic_block *parent;

	if (!bb->parents)
		return !!local;

	if (bb_list_size(bb->parents) > 1)
		loads = 0;
	FOR_EACH_PTR(bb->parents, parent) {
		struct instruction *one;
		struct instruction *br;
		pseudo_t phi;

		FOR_EACH_PTR_REVERSE(parent->insns, one) {
			int dominance;
			if (one == insn)
				goto no_dominance;
			dominance = dominates(pseudo, insn, one, local);
			if (dominance < 0) {
				if (one->opcode == OP_LOAD)
					continue;
				return 0;
			}
			if (!dominance)
				continue;
			if (one->opcode == OP_LOAD && !loads)
				continue;
			goto found_dominator;
		} END_FOR_EACH_PTR_REVERSE(one);
no_dominance:
		if (parent->generation == generation)
			continue;
		parent->generation = generation;

		if (!find_dominating_parents(pseudo, insn, parent, generation, dominators, local, loads))
			return 0;
		continue;

found_dominator:
		br = delete_last_instruction(&parent->insns);
		phi = alloc_phi(parent, one->target, one->size);
		phi->ident = phi->ident ? : pseudo->ident;
		add_instruction(&parent->insns, br);
		use_pseudo(insn, phi, add_pseudo(dominators, phi));
	} END_FOR_EACH_PTR(parent);
	return 1;
}

/*
 * We should probably sort the phi list just to make it easier to compare
 * later for equality.
 */
void rewrite_load_instruction(struct instruction *insn, struct pseudo_list *dominators)
{
	pseudo_t new, phi;

	/*
	 * Check for somewhat common case of duplicate
	 * phi nodes.
	 */
	new = first_pseudo(dominators)->def->src1;
	FOR_EACH_PTR(dominators, phi) {
		if (new != phi->def->src1)
			goto complex_phi;
		new->ident = new->ident ? : phi->ident;
	} END_FOR_EACH_PTR(phi);

	/*
	 * All the same pseudo - mark the phi-nodes unused
	 * and convert the load into a LNOP and replace the
	 * pseudo.
	 */
	FOR_EACH_PTR(dominators, phi) {
		phi->def->bb = NULL;
	} END_FOR_EACH_PTR(phi);
	convert_load_instruction(insn, new);
	return;

complex_phi:
	/* We leave symbol pseudos with a bogus usage list here */
	if (insn->src->type != PSEUDO_SYM)
		kill_use(&insn->src);
	insn->opcode = OP_PHI;
	insn->phi_list = dominators;
}

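/*
 * Try to convert a load into a direct use of a dominating store's
 * value - first within the load's own basic block, then through the
 * parents. Returns 1 if the load was converted or rewritten.
 */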
static int find_dominating_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, int local)
{
	struct basic_block *bb = insn->bb;
	struct instruction *one, *dom = NULL;
	struct pseudo_list *dominators;
	int partial;

	/* Unreachable load? Undo it */
	if (!bb) {
		insn->opcode = OP_LNOP;
		return 1;
	}

	partial = 0;
	FOR_EACH_PTR(bb->insns, one) {
		int dominance;
		if (one == insn)
			goto found;
		dominance = dominates(pseudo, insn, one, local);
		if (dominance < 0) {
			/* Ignore partial load dominators */
			if (one->opcode == OP_LOAD)
				continue;
			dom = NULL;
			partial = 1;
			continue;
		}
		if (!dominance)
			continue;
		dom = one;
		partial = 0;
	} END_FOR_EACH_PTR(one);
	/* Whaa? */
	warning(pseudo->sym->pos, "unable to find symbol read");
	return 0;
found:
	if (partial)
		return 0;

	if (dom) {
		convert_load_instruction(insn, dom->target);
		return 1;
	}

	/* OK, go find the parents */
	bb->generation = generation;

	dominators = NULL;
	if (!find_dominating_parents(pseudo, insn, bb, generation, &dominators, local, 1))
		return 0;

	/* This happens with initial assignments to structures etc.. */
	if (!dominators) {
		if (!local)
			return 0;
		check_access(insn);
		convert_load_instruction(insn, value_pseudo(0));
		return 1;
	}

	/*
	 * If we find just one dominating instruction, we
	 * can turn it into a direct thing. Otherwise we'll
	 * have to turn the load into a phi-node of the
	 * dominators.
	 */
	rewrite_load_instruction(insn, dominators);
	return 1;
}

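/* Turn a store into a no-op and drop its use of the stored value */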
static void kill_store(struct instruction *insn)
{
	if (insn) {
		insn->bb = NULL;
		insn->opcode = OP_SNOP;
		kill_use(&insn->target);
	}
}

/* Kill a pseudo that is dead on exit from the bb */
static void kill_dead_stores(pseudo_t pseudo, unsigned long generation, struct basic_block *bb, int local)
{
	struct instruction *insn;
	struct basic_block *parent;

	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR_REVERSE(bb->insns, insn) {
		int opcode = insn->opcode;

		if (opcode != OP_LOAD && opcode != OP_STORE) {
			if (local)
				continue;
			if (opcode == OP_CALL)
				return;
			continue;
		}
		if (insn->src == pseudo) {
			if (opcode == OP_LOAD)
				return;
			kill_store(insn);
			continue;
		}
		if (local)
			continue;
		if (insn->src->type != PSEUDO_SYM)
			return;
	} END_FOR_EACH_PTR_REVERSE(insn);

	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dead_stores(pseudo, generation, parent, local);
	} END_FOR_EACH_PTR(parent);
}

/*
 * This should see if the "insn" trivially dominates some previous store, and kill the
 * store if unnecessary.
 */
static void kill_dominated_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, struct basic_block *bb, int local, int found)
{
	struct instruction *one;
	struct basic_block *parent;

	/* Unreachable store? Undo it */
	if (!bb) {
		kill_store(insn);
		return;
	}
	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR_REVERSE(bb->insns, one) {
		int dominance;
		if (!found) {
			if (one != insn)
				continue;
			found = 1;
			continue;
		}
		dominance = dominates(pseudo, insn, one, local);
		if (!dominance)
			continue;
		if (dominance < 0)
			return;
		if (one->opcode == OP_LOAD)
			return;
		kill_store(one);
	} END_FOR_EACH_PTR_REVERSE(one);

	if (!found) {
		warning(bb->pos, "Unable to find instruction");
		return;
	}

	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dominated_stores(pseudo, insn, generation, parent, local, found);
	} END_FOR_EACH_PTR(parent);
}

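/*
 * Warn about accesses that fall below, or past the end of, the
 * symbol they reference.
 */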
void check_access(struct instruction *insn)
{
	pseudo_t pseudo = insn->src;

	if (insn->bb && pseudo->type == PSEUDO_SYM) {
		int offset = insn->offset, bit = (offset << 3) + insn->size;
		struct symbol *sym = pseudo->sym;

		if (sym->bit_size > 0 && (offset < 0 || bit > sym->bit_size))
			warning(insn->pos, "invalid access %s '%s' (%d %d)",
				offset < 0 ? "below" : "past the end of",
				show_ident(sym->ident), offset, sym->bit_size >> 3);
	}
}

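/*
 * Simplify the loads and stores of one symbol: a purely local symbol
 * with at most a single store can have its loads replaced directly
 * by the stored value; everything else goes through the
 * dominating-store analysis above.
 */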
static void simplify_one_symbol(struct entrypoint *ep, struct symbol *sym)
{
	pseudo_t pseudo, src;
	struct pseudo_user *pu;
	struct instruction *def;
	unsigned long mod;
	int all, stores, complex;

	/* Never used as a symbol? */
	pseudo = sym->pseudo;
	if (!pseudo)
		return;

	/* We don't do coverage analysis of volatiles.. */
	if (sym->ctype.modifiers & MOD_VOLATILE)
		return;

	/* ..and symbols with external visibility need more care */
	mod = sym->ctype.modifiers & (MOD_NONLOCAL | MOD_STATIC | MOD_ADDRESSABLE);
	if (mod)
		goto external_visibility;

	def = NULL;
	stores = 0;
	complex = 0;
	FOR_EACH_PTR(pseudo->users, pu) {
		/* We know that the symbol-pseudo use is the "src" in the instruction */
		struct instruction *insn = pu->insn;

		switch (insn->opcode) {
		case OP_STORE:
			stores++;
			def = insn;
			break;
		case OP_LOAD:
			break;
		case OP_SYMADDR:
			if (!insn->bb)
				continue;
			mod |= MOD_ADDRESSABLE;
			goto external_visibility;
		case OP_NOP:
		case OP_SNOP:
		case OP_LNOP:
		case OP_PHI:
			continue;
		default:
			warning(sym->pos, "symbol '%s' pseudo used in unexpected way", show_ident(sym->ident));
		}
		complex |= insn->offset;
	} END_FOR_EACH_PTR(pu);

	if (complex)
		goto complex_def;
	if (stores > 1)
		goto multi_def;

	/*
	 * Goodie, we have a single store (if even that) in the whole
	 * thing. Replace all loads with moves from the pseudo,
	 * replace the store with a def.
	 */
	src = VOID;
	if (def)
		src = def->target;

	FOR_EACH_PTR(pseudo->users, pu) {
		struct instruction *insn = pu->insn;
		if (insn->opcode == OP_LOAD) {
			check_access(insn);
			convert_load_instruction(insn, src);
		}
	} END_FOR_EACH_PTR(pu);

	/* Turn the store into a no-op */
	kill_store(def);
	return;

multi_def:
complex_def:
external_visibility:
	all = 1;
	FOR_EACH_PTR_REVERSE(pseudo->users, pu) {
		struct instruction *insn = pu->insn;
		if (insn->opcode == OP_LOAD)
			all &= find_dominating_stores(pseudo, insn, ++bb_generation, !mod);
	} END_FOR_EACH_PTR_REVERSE(pu);

	/* If we converted all the loads, remove the stores. They are dead */
	if (all && !mod) {
		FOR_EACH_PTR(pseudo->users, pu) {
			struct instruction *insn = pu->insn;
			if (insn->opcode == OP_STORE)
				kill_store(insn);
		} END_FOR_EACH_PTR(pu);
	} else {
		/*
		 * If we couldn't take the shortcut, see if we can at least kill some
		 * of them..
		 */
		FOR_EACH_PTR(pseudo->users, pu) {
			struct instruction *insn = pu->insn;
			if (insn->opcode == OP_STORE)
				kill_dominated_stores(pseudo, insn, ++bb_generation, insn->bb, !mod, 0);
		} END_FOR_EACH_PTR(pu);

		if (!(mod & (MOD_NONLOCAL | MOD_STATIC))) {
			struct basic_block *bb;
			FOR_EACH_PTR(ep->bbs, bb) {
				if (!bb->children)
					kill_dead_stores(pseudo, ++bb_generation, bb, !mod);
			} END_FOR_EACH_PTR(bb);
		}
	}
	return;
}

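/* Run the per-symbol simplification over every symbol accessed in "ep" */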
void simplify_symbol_usage(struct entrypoint *ep)
{
	pseudo_t pseudo;

	FOR_EACH_PTR(ep->accesses, pseudo) {
		simplify_one_symbol(ep, pseudo->sym);
	} END_FOR_EACH_PTR(pseudo);
}

static void mark_bb_reachable(struct basic_block *bb, unsigned long generation)
{
	struct basic_block *child;

	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR(bb->children, child) {
		mark_bb_reachable(child, generation);
	} END_FOR_EACH_PTR(child);
}

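/*
 * If "insn" is still the defining instruction of its target pseudo,
 * redirect all remaining users of that pseudo to VOID.
 */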
static void kill_defs(struct instruction *insn)
{
	pseudo_t target = insn->target;

	if (!has_use_list(target))
		return;
	if (target->def != insn)
		return;

	convert_instruction_target(insn, VOID);
}

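/*
 * Remove a basic block completely: kill all its instructions and
 * unlink it from its parents and children.
 */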
void kill_bb(struct basic_block *bb)
{
	struct instruction *insn;
	struct basic_block *child, *parent;

	FOR_EACH_PTR(bb->insns, insn) {
		kill_instruction(insn);
		kill_defs(insn);
		/*
		 * We kill unreachable instructions even if they
		 * otherwise aren't "killable" (e.g. volatile loads)
		 */
		insn->bb = NULL;
	} END_FOR_EACH_PTR(insn);
	bb->insns = NULL;

	FOR_EACH_PTR(bb->children, child) {
		remove_bb_from_list(&child->parents, bb, 0);
	} END_FOR_EACH_PTR(child);
	bb->children = NULL;

	FOR_EACH_PTR(bb->parents, parent) {
		remove_bb_from_list(&parent->children, bb, 0);
	} END_FOR_EACH_PTR(parent);
	bb->parents = NULL;
}

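/*
 * Mark everything reachable from the entry block, then delete
 * every basic block that was not reached.
 */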
void kill_unreachable_bbs(struct entrypoint *ep)
{
	struct basic_block *bb;
	unsigned long generation = ++bb_generation;

	mark_bb_reachable(ep->entry->bb, generation);
	FOR_EACH_PTR(ep->bbs, bb) {
		if (bb->generation == generation)
			continue;
		/* Mark it as being dead */
		kill_bb(bb);
		bb->ep = NULL;
		DELETE_CURRENT_PTR(bb);
	} END_FOR_EACH_PTR(bb);
	PACK_PTR_LIST(&ep->bbs);
}

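/*
 * Redirect the terminating branch (or switch) of "bb" from "old"
 * to "new". Returns non-zero if anything was actually changed.
 */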
static int rewrite_parent_branch(struct basic_block *bb, struct basic_block *old, struct basic_block *new)
{
	int changed = 0;
	struct instruction *insn = last_instruction(bb->insns);

	if (!insn)
		return 0;

	/* Infinite loops: let's not "optimize" them.. */
	if (old == new)
		return 0;

	switch (insn->opcode) {
	case OP_BR:
		changed |= rewrite_branch(bb, &insn->bb_true, old, new);
		changed |= rewrite_branch(bb, &insn->bb_false, old, new);
		assert(changed);
		return changed;
	case OP_SWITCH: {
		struct multijmp *jmp;
		FOR_EACH_PTR(insn->multijmp_list, jmp) {
			changed |= rewrite_branch(bb, &jmp->target, old, new);
		} END_FOR_EACH_PTR(jmp);
		assert(changed);
		return changed;
	}
	default:
		return 0;
	}
}

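/*
 * A block that is nothing but a branch: figure out its final
 * destination (following a constant conditional if there is one)
 * and make all parents jump there directly. Returns the final
 * destination, or NULL if the block couldn't be bypassed.
 */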
static struct basic_block * rewrite_branch_bb(struct basic_block *bb, struct instruction *br)
{
	struct basic_block *parent;
	struct basic_block *target = br->bb_true;
	struct basic_block *false = br->bb_false;

	if (target && false) {
		pseudo_t cond = br->cond;
		if (cond->type != PSEUDO_VAL)
			return NULL;
		target = cond->value ? target : false;
	}

	/*
	 * We can't do FOR_EACH_PTR() here, because the parent list
	 * may change when we rewrite the parent.
	 */
	while ((parent = first_basic_block(bb->parents)) != NULL) {
		if (!rewrite_parent_branch(parent, bb, target))
			return NULL;
	}
	return target;
}

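/*
 * CFG consistency checks: every parent/child link must have a
 * matching back-link, and a block's children must agree with its
 * terminating instruction.
 */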
static void vrfy_bb_in_list(struct basic_block *bb, struct basic_block_list *list)
{
	if (bb) {
		struct basic_block *tmp;
		int no_bb_in_list = 0;

		FOR_EACH_PTR(list, tmp) {
			if (bb == tmp)
				return;
		} END_FOR_EACH_PTR(tmp);
		assert(no_bb_in_list);
	}
}

static void vrfy_parents(struct basic_block *bb)
{
	struct basic_block *tmp;
	FOR_EACH_PTR(bb->parents, tmp) {
		vrfy_bb_in_list(bb, tmp->children);
	} END_FOR_EACH_PTR(tmp);
}

static void vrfy_children(struct basic_block *bb)
{
	struct basic_block *tmp;
	struct instruction *br = last_instruction(bb->insns);

	if (!br) {
		assert(!bb->children);
		return;
	}
	switch (br->opcode) {
		struct multijmp *jmp;
	case OP_BR:
		vrfy_bb_in_list(br->bb_true, bb->children);
		vrfy_bb_in_list(br->bb_false, bb->children);
		break;
	case OP_SWITCH:
	case OP_COMPUTEDGOTO:
		FOR_EACH_PTR(br->multijmp_list, jmp) {
			vrfy_bb_in_list(jmp->target, bb->children);
		} END_FOR_EACH_PTR(jmp);
		break;
	default:
		break;
	}

	FOR_EACH_PTR(bb->children, tmp) {
		vrfy_bb_in_list(bb, tmp->parents);
	} END_FOR_EACH_PTR(tmp);
}

static void vrfy_bb_flow(struct basic_block *bb)
{
	vrfy_children(bb);
	vrfy_parents(bb);
}

void vrfy_flow(struct entrypoint *ep)
{
	struct basic_block *bb;
	struct basic_block *entry = ep->entry->bb;

	FOR_EACH_PTR(ep->bbs, bb) {
		if (bb == entry)
			entry = NULL;
		vrfy_bb_flow(bb);
	} END_FOR_EACH_PTR(bb);
	assert(!entry);
}

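/*
 * Pack the CFG: get rid of blocks that are nothing but a branch,
 * and merge each remaining block into its parent when it is that
 * parent's only child and the parent is its only parent.
 */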
void pack_basic_blocks(struct entrypoint *ep)
{
	struct basic_block *bb;

	/* See if we can merge a bb into another one.. */
	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *first, *insn;
		struct basic_block *parent, *child, *last;

		if (!bb_reachable(bb))
			continue;

		/*
		 * Just a branch?
		 */
		FOR_EACH_PTR(bb->insns, first) {
			if (!first->bb)
				continue;
			switch (first->opcode) {
			case OP_NOP: case OP_LNOP: case OP_SNOP:
				continue;
			case OP_BR: {
				struct basic_block *replace;
				replace = rewrite_branch_bb(bb, first);
				if (replace) {
					kill_bb(bb);
					goto no_merge;
				}
			}
			/* fallthrough */
			default:
				goto out;
			}
		} END_FOR_EACH_PTR(first);

out:
		/*
		 * See if we only have one parent..
		 */
		last = NULL;
		FOR_EACH_PTR(bb->parents, parent) {
			if (last) {
				if (last != parent)
					goto no_merge;
				continue;
			}
			last = parent;
		} END_FOR_EACH_PTR(parent);

		parent = last;
		if (!parent || parent == bb)
			continue;

		/*
		 * Goodie. See if the parent can merge..
		 */
		FOR_EACH_PTR(parent->children, child) {
			if (child != bb)
				goto no_merge;
		} END_FOR_EACH_PTR(child);

		/*
		 * Merge the two.
		 */
		repeat_phase |= REPEAT_CSE;

		parent->children = bb->children;
		bb->children = NULL;
		bb->parents = NULL;

		FOR_EACH_PTR(parent->children, child) {
			replace_bb_in_list(&child->parents, bb, parent, 0);
		} END_FOR_EACH_PTR(child);

		kill_instruction(delete_last_instruction(&parent->insns));
		FOR_EACH_PTR(bb->insns, insn) {
			if (insn->bb) {
				assert(insn->bb == bb);
				insn->bb = parent;
			}
			add_instruction(&parent->insns, insn);
		} END_FOR_EACH_PTR(insn);
		bb->insns = NULL;

no_merge:
		/* nothing to do */;
	} END_FOR_EACH_PTR(bb);
}