/*
 * Imported from smatch.git blob of flow.c (carried in a patch titled
 * "Add -Wno-cast-truncate"); web-scrape artifacts converted to a comment.
 */
/*
 * Flow - walk the linearized flowgraph, simplifying it as we
 * go along.
 *
 * Copyright (C) 2004 Linus Torvalds
 */
8 #include <string.h>
9 #include <stdarg.h>
10 #include <stdlib.h>
11 #include <stdio.h>
12 #include <stddef.h>
13 #include <assert.h>
15 #include "parse.h"
16 #include "expression.h"
17 #include "linearize.h"
18 #include "flow.h"
20 unsigned long bb_generation;
23 * Dammit, if we have a phi-node followed by a conditional
24 * branch on that phi-node, we should damn well be able to
25 * do something about the source. Maybe.
27 static int rewrite_branch(struct basic_block *bb,
28 struct basic_block **ptr,
29 struct basic_block *old,
30 struct basic_block *new)
32 if (*ptr != old || new == old)
33 return 0;
35 /* We might find new if-conversions or non-dominating CSEs */
36 repeat_phase |= REPEAT_CSE;
37 *ptr = new;
38 replace_bb_in_list(&bb->children, old, new, 1);
39 remove_bb_from_list(&old->parents, bb, 1);
40 add_bb(&new->parents, bb);
41 return 1;
45 * Return the known truth value of a pseudo, or -1 if
46 * it's not known.
48 static int pseudo_truth_value(pseudo_t pseudo)
50 switch (pseudo->type) {
51 case PSEUDO_VAL:
52 return !!pseudo->value;
54 case PSEUDO_REG: {
55 struct instruction *insn = pseudo->def;
57 /* A symbol address is always considered true.. */
58 if (insn->opcode == OP_SYMADDR && insn->target == pseudo)
59 return 1;
61 /* Fall through */
62 default:
63 return -1;
68 * Does a basic block depend on the pseudos that "src" defines?
70 static int bb_depends_on(struct basic_block *target, struct basic_block *src)
72 pseudo_t pseudo;
74 FOR_EACH_PTR(src->defines, pseudo) {
75 if (pseudo_in_list(target->needs, pseudo))
76 return 1;
77 } END_FOR_EACH_PTR(pseudo);
78 return 0;
82 * When we reach here, we have:
83 * - a basic block that ends in a conditional branch and
84 * that has no side effects apart from the pseudos it
85 * may change.
86 * - the phi-node that the conditional branch depends on
87 * - full pseudo liveness information
89 * We need to check if any of the _sources_ of the phi-node
90 * may be constant, and not actually need this block at all.
92 static int try_to_simplify_bb(struct basic_block *bb, struct instruction *first, struct instruction *second)
94 int changed = 0;
95 pseudo_t phi;
97 FOR_EACH_PTR(first->phi_list, phi) {
98 struct instruction *def = phi->def;
99 struct basic_block *source, *target;
100 pseudo_t pseudo;
101 struct instruction *br;
102 int true;
104 if (!def)
105 continue;
106 source = def->bb;
107 pseudo = def->src1;
108 if (!pseudo || !source)
109 continue;
110 br = last_instruction(source->insns);
111 if (!br)
112 continue;
113 if (br->opcode != OP_BR)
114 continue;
115 true = pseudo_truth_value(pseudo);
116 if (true < 0)
117 continue;
118 target = true ? second->bb_true : second->bb_false;
119 if (bb_depends_on(target, bb))
120 continue;
121 changed |= rewrite_branch(source, &br->bb_true, bb, target);
122 changed |= rewrite_branch(source, &br->bb_false, bb, target);
123 } END_FOR_EACH_PTR(phi);
124 return changed;
127 static int bb_has_side_effects(struct basic_block *bb)
129 struct instruction *insn;
130 FOR_EACH_PTR(bb->insns, insn) {
131 switch (insn->opcode) {
132 case OP_CALL:
133 /* Fixme! This should take "const" etc into account */
134 return 1;
136 case OP_STORE:
137 case OP_CONTEXT:
138 return 1;
140 case OP_ASM:
141 /* Fixme! This should take "volatile" etc into account */
142 return 1;
144 default:
145 continue;
147 } END_FOR_EACH_PTR(insn);
148 return 0;
151 static int simplify_phi_branch(struct basic_block *bb, struct instruction *br)
153 pseudo_t cond = br->cond;
154 struct instruction *def;
156 if (cond->type != PSEUDO_REG)
157 return 0;
158 def = cond->def;
159 if (def->bb != bb || def->opcode != OP_PHI)
160 return 0;
161 if (bb_has_side_effects(bb))
162 return 0;
163 return try_to_simplify_bb(bb, def, br);
166 static int simplify_branch_branch(struct basic_block *bb, struct instruction *br,
167 struct basic_block **target_p, int true)
169 struct basic_block *target = *target_p, *final;
170 struct instruction *insn;
171 int retval;
173 if (target == bb)
174 return 0;
175 insn = last_instruction(target->insns);
176 if (!insn || insn->opcode != OP_BR || insn->cond != br->cond)
177 return 0;
179 * Ahhah! We've found a branch to a branch on the same conditional!
180 * Now we just need to see if we can rewrite the branch..
182 retval = 0;
183 final = true ? insn->bb_true : insn->bb_false;
184 if (bb_has_side_effects(target))
185 goto try_to_rewrite_target;
186 if (bb_depends_on(final, target))
187 goto try_to_rewrite_target;
188 return rewrite_branch(bb, target_p, target, final);
190 try_to_rewrite_target:
192 * If we're the only parent, at least we can rewrite the
193 * now-known second branch.
195 if (bb_list_size(target->parents) != 1)
196 return retval;
197 insert_branch(target, insn, final);
198 return 1;
201 static int simplify_one_branch(struct basic_block *bb, struct instruction *br)
203 if (simplify_phi_branch(bb, br))
204 return 1;
205 return simplify_branch_branch(bb, br, &br->bb_true, 1) |
206 simplify_branch_branch(bb, br, &br->bb_false, 0);
209 static int simplify_branch_nodes(struct entrypoint *ep)
211 int changed = 0;
212 struct basic_block *bb;
214 FOR_EACH_PTR(ep->bbs, bb) {
215 struct instruction *br = last_instruction(bb->insns);
217 if (!br || br->opcode != OP_BR || !br->bb_false)
218 continue;
219 changed |= simplify_one_branch(bb, br);
220 } END_FOR_EACH_PTR(bb);
221 return changed;
/*
 * This is called late - when we have intra-bb liveness information..
 */
int simplify_flow(struct entrypoint *ep)
{
	return simplify_branch_nodes(ep);
}
/* Append one pseudo user list onto another (thin ptr_list wrapper) */
static inline void concat_user_list(struct pseudo_ptr_list *src, struct pseudo_ptr_list **dst)
{
	concat_ptr_list((struct ptr_list *)src, (struct ptr_list **)dst);
}
237 void convert_instruction_target(struct instruction *insn, pseudo_t src)
239 pseudo_t target, *usep;
242 * Go through the "insn->users" list and replace them all..
244 target = insn->target;
245 if (target == src)
246 return;
247 FOR_EACH_PTR(target->users, usep) {
248 if (*usep != VOID) {
249 assert(*usep == target);
250 *usep = src;
252 } END_FOR_EACH_PTR(usep);
253 concat_user_list(target->users, &src->users);
254 target->users = NULL;
257 void convert_load_instruction(struct instruction *insn, pseudo_t src)
259 convert_instruction_target(insn, src);
260 /* Turn the load into a no-op */
261 insn->opcode = OP_LNOP;
262 insn->bb = NULL;
265 static int overlapping_memop(struct instruction *a, struct instruction *b)
267 unsigned int a_start = a->offset << 3;
268 unsigned int b_start = b->offset << 3;
269 unsigned int a_size = a->size;
270 unsigned int b_size = b->size;
272 if (a_size + a_start <= b_start)
273 return 0;
274 if (b_size + b_start <= a_start)
275 return 0;
276 return 1;
279 static inline int same_memop(struct instruction *a, struct instruction *b)
281 return a->offset == b->offset && a->size == b->size;
285 * Return 1 if "one" dominates the access to 'pseudo'
286 * in insn.
288 * Return 0 if it doesn't, and -1 if you don't know.
290 int dominates(pseudo_t pseudo, struct instruction *insn, struct instruction *dom, int local)
292 int opcode = dom->opcode;
294 if (opcode == OP_CALL || opcode == OP_ENTRY)
295 return local ? 0 : -1;
296 if (opcode != OP_LOAD && opcode != OP_STORE)
297 return 0;
298 if (dom->src != pseudo) {
299 if (local)
300 return 0;
301 /* We don't think two explicitly different symbols ever alias */
302 if (dom->src->type == PSEUDO_SYM)
303 return 0;
304 /* We could try to do some alias analysis here */
305 return -1;
307 if (!same_memop(insn, dom)) {
308 if (dom->opcode == OP_LOAD)
309 return 0;
310 if (!overlapping_memop(insn, dom))
311 return 0;
312 return -1;
314 return 1;
317 static int find_dominating_parents(pseudo_t pseudo, struct instruction *insn,
318 struct basic_block *bb, unsigned long generation, struct pseudo_list **dominators,
319 int local, int loads)
321 struct basic_block *parent;
323 if (!bb->parents)
324 return !!local;
326 if (bb_list_size(bb->parents) > 1)
327 loads = 0;
328 FOR_EACH_PTR(bb->parents, parent) {
329 struct instruction *one;
330 struct instruction *br;
331 pseudo_t phi;
333 FOR_EACH_PTR_REVERSE(parent->insns, one) {
334 int dominance;
335 if (one == insn)
336 goto no_dominance;
337 dominance = dominates(pseudo, insn, one, local);
338 if (dominance < 0) {
339 if (one->opcode == OP_LOAD)
340 continue;
341 return 0;
343 if (!dominance)
344 continue;
345 if (one->opcode == OP_LOAD && !loads)
346 continue;
347 goto found_dominator;
348 } END_FOR_EACH_PTR_REVERSE(one);
349 no_dominance:
350 if (parent->generation == generation)
351 continue;
352 parent->generation = generation;
354 if (!find_dominating_parents(pseudo, insn, parent, generation, dominators, local, loads))
355 return 0;
356 continue;
358 found_dominator:
359 br = delete_last_instruction(&parent->insns);
360 phi = alloc_phi(parent, one->target, one->size);
361 phi->ident = phi->ident ? : pseudo->ident;
362 add_instruction(&parent->insns, br);
363 use_pseudo(phi, add_pseudo(dominators, phi));
364 } END_FOR_EACH_PTR(parent);
365 return 1;
369 * We should probably sort the phi list just to make it easier to compare
370 * later for equality.
372 void rewrite_load_instruction(struct instruction *insn, struct pseudo_list *dominators)
374 pseudo_t new, phi;
377 * Check for somewhat common case of duplicate
378 * phi nodes.
380 new = first_pseudo(dominators)->def->src1;
381 FOR_EACH_PTR(dominators, phi) {
382 if (new != phi->def->src1)
383 goto complex_phi;
384 new->ident = new->ident ? : phi->ident;
385 } END_FOR_EACH_PTR(phi);
388 * All the same pseudo - mark the phi-nodes unused
389 * and convert the load into a LNOP and replace the
390 * pseudo.
392 FOR_EACH_PTR(dominators, phi) {
393 phi->def->bb = NULL;
394 } END_FOR_EACH_PTR(phi);
395 convert_load_instruction(insn, new);
396 return;
398 complex_phi:
399 /* We leave symbol pseudos with a bogus usage list here */
400 if (insn->src->type != PSEUDO_SYM)
401 kill_use(&insn->src);
402 insn->opcode = OP_PHI;
403 insn->phi_list = dominators;
406 static int find_dominating_stores(pseudo_t pseudo, struct instruction *insn,
407 unsigned long generation, int local)
409 struct basic_block *bb = insn->bb;
410 struct instruction *one, *dom = NULL;
411 struct pseudo_list *dominators;
412 int partial;
414 /* Unreachable load? Undo it */
415 if (!bb) {
416 insn->opcode = OP_LNOP;
417 return 1;
420 partial = 0;
421 FOR_EACH_PTR(bb->insns, one) {
422 int dominance;
423 if (one == insn)
424 goto found;
425 dominance = dominates(pseudo, insn, one, local);
426 if (dominance < 0) {
427 /* Ignore partial load dominators */
428 if (one->opcode == OP_LOAD)
429 continue;
430 dom = NULL;
431 partial = 1;
432 continue;
434 if (!dominance)
435 continue;
436 dom = one;
437 partial = 0;
438 } END_FOR_EACH_PTR(one);
439 /* Whaa? */
440 warning(pseudo->sym->pos, "unable to find symbol read");
441 return 0;
442 found:
443 if (partial)
444 return 0;
446 if (dom) {
447 convert_load_instruction(insn, dom->target);
448 return 1;
451 /* Ok, go find the parents */
452 bb->generation = generation;
454 dominators = NULL;
455 if (!find_dominating_parents(pseudo, insn, bb, generation, &dominators, local, 1))
456 return 0;
458 /* This happens with initial assignments to structures etc.. */
459 if (!dominators) {
460 if (!local)
461 return 0;
462 check_access(insn);
463 convert_load_instruction(insn, value_pseudo(0));
464 return 1;
468 * If we find just one dominating instruction, we
469 * can turn it into a direct thing. Otherwise we'll
470 * have to turn the load into a phi-node of the
471 * dominators.
473 rewrite_load_instruction(insn, dominators);
474 return 1;
477 static void kill_store(struct instruction *insn)
479 if (insn) {
480 insn->bb = NULL;
481 insn->opcode = OP_SNOP;
482 kill_use(&insn->target);
486 /* Kill a pseudo that is dead on exit from the bb */
487 static void kill_dead_stores(pseudo_t pseudo, unsigned long generation, struct basic_block *bb, int local)
489 struct instruction *insn;
490 struct basic_block *parent;
492 if (bb->generation == generation)
493 return;
494 bb->generation = generation;
495 FOR_EACH_PTR_REVERSE(bb->insns, insn) {
496 int opcode = insn->opcode;
498 if (opcode != OP_LOAD && opcode != OP_STORE) {
499 if (local)
500 continue;
501 if (opcode == OP_CALL)
502 return;
503 continue;
505 if (insn->src == pseudo) {
506 if (opcode == OP_LOAD)
507 return;
508 kill_store(insn);
509 continue;
511 if (local)
512 continue;
513 if (insn->src->type != PSEUDO_SYM)
514 return;
515 } END_FOR_EACH_PTR_REVERSE(insn);
517 FOR_EACH_PTR(bb->parents, parent) {
518 struct basic_block *child;
519 FOR_EACH_PTR(parent->children, child) {
520 if (child && child != bb)
521 return;
522 } END_FOR_EACH_PTR(child);
523 kill_dead_stores(pseudo, generation, parent, local);
524 } END_FOR_EACH_PTR(parent);
528 * This should see if the "insn" trivially dominates some previous store, and kill the
529 * store if unnecessary.
531 static void kill_dominated_stores(pseudo_t pseudo, struct instruction *insn,
532 unsigned long generation, struct basic_block *bb, int local, int found)
534 struct instruction *one;
535 struct basic_block *parent;
537 /* Unreachable store? Undo it */
538 if (!bb) {
539 kill_store(insn);
540 return;
542 if (bb->generation == generation)
543 return;
544 bb->generation = generation;
545 FOR_EACH_PTR_REVERSE(bb->insns, one) {
546 int dominance;
547 if (!found) {
548 if (one != insn)
549 continue;
550 found = 1;
551 continue;
553 dominance = dominates(pseudo, insn, one, local);
554 if (!dominance)
555 continue;
556 if (dominance < 0)
557 return;
558 if (one->opcode == OP_LOAD)
559 return;
560 kill_store(one);
561 } END_FOR_EACH_PTR_REVERSE(one);
563 if (!found) {
564 warning(bb->pos, "Unable to find instruction");
565 return;
568 FOR_EACH_PTR(bb->parents, parent) {
569 struct basic_block *child;
570 FOR_EACH_PTR(parent->children, child) {
571 if (child && child != bb)
572 return;
573 } END_FOR_EACH_PTR(child);
574 kill_dominated_stores(pseudo, insn, generation, parent, local, found);
575 } END_FOR_EACH_PTR(parent);
578 void check_access(struct instruction *insn)
580 pseudo_t pseudo = insn->src;
582 if (insn->bb && pseudo->type == PSEUDO_SYM) {
583 int offset = insn->offset, bit = (offset << 3) + insn->size;
584 struct symbol *sym = pseudo->sym;
586 if (sym->bit_size > 0 && (offset < 0 || bit > sym->bit_size))
587 warning(insn->pos, "invalid access %s '%s' (%d %d)",
588 offset < 0 ? "below" : "past the end of",
589 show_ident(sym->ident), offset, sym->bit_size >> 3);
593 static void simplify_one_symbol(struct entrypoint *ep, struct symbol *sym)
595 pseudo_t pseudo, src, *pp;
596 struct instruction *def;
597 unsigned long mod;
598 int all, stores, complex;
600 /* Never used as a symbol? */
601 pseudo = sym->pseudo;
602 if (!pseudo)
603 return;
605 /* We don't do coverage analysis of volatiles.. */
606 if (sym->ctype.modifiers & MOD_VOLATILE)
607 return;
609 /* ..and symbols with external visibility need more care */
610 mod = sym->ctype.modifiers & (MOD_NONLOCAL | MOD_STATIC | MOD_ADDRESSABLE);
611 if (mod)
612 goto external_visibility;
614 def = NULL;
615 stores = 0;
616 complex = 0;
617 FOR_EACH_PTR(pseudo->users, pp) {
618 /* We know that the symbol-pseudo use is the "src" in the instruction */
619 struct instruction *insn = container(pp, struct instruction, src);
621 switch (insn->opcode) {
622 case OP_STORE:
623 stores++;
624 def = insn;
625 break;
626 case OP_LOAD:
627 break;
628 case OP_SYMADDR:
629 if (!insn->bb)
630 continue;
631 mod |= MOD_ADDRESSABLE;
632 goto external_visibility;
633 case OP_NOP:
634 case OP_SNOP:
635 case OP_LNOP:
636 case OP_PHI:
637 continue;
638 default:
639 warning(sym->pos, "symbol '%s' pseudo used in unexpected way", show_ident(sym->ident));
641 complex |= insn->offset;
642 } END_FOR_EACH_PTR(pp);
644 if (complex)
645 goto complex_def;
646 if (stores > 1)
647 goto multi_def;
650 * Goodie, we have a single store (if even that) in the whole
651 * thing. Replace all loads with moves from the pseudo,
652 * replace the store with a def.
654 src = VOID;
655 if (def)
656 src = def->target;
658 FOR_EACH_PTR(pseudo->users, pp) {
659 struct instruction *insn = container(pp, struct instruction, src);
660 if (insn->opcode == OP_LOAD) {
661 check_access(insn);
662 convert_load_instruction(insn, src);
664 } END_FOR_EACH_PTR(pp);
666 /* Turn the store into a no-op */
667 kill_store(def);
668 return;
670 multi_def:
671 complex_def:
672 external_visibility:
673 all = 1;
674 FOR_EACH_PTR_REVERSE(pseudo->users, pp) {
675 struct instruction *insn = container(pp, struct instruction, src);
676 if (insn->opcode == OP_LOAD)
677 all &= find_dominating_stores(pseudo, insn, ++bb_generation, !mod);
678 } END_FOR_EACH_PTR_REVERSE(pp);
680 /* If we converted all the loads, remove the stores. They are dead */
681 if (all && !mod) {
682 FOR_EACH_PTR(pseudo->users, pp) {
683 struct instruction *insn = container(pp, struct instruction, src);
684 if (insn->opcode == OP_STORE)
685 kill_store(insn);
686 } END_FOR_EACH_PTR(pp);
687 } else {
689 * If we couldn't take the shortcut, see if we can at least kill some
690 * of them..
692 FOR_EACH_PTR(pseudo->users, pp) {
693 struct instruction *insn = container(pp, struct instruction, src);
694 if (insn->opcode == OP_STORE)
695 kill_dominated_stores(pseudo, insn, ++bb_generation, insn->bb, !mod, 0);
696 } END_FOR_EACH_PTR(pp);
698 if (!(mod & (MOD_NONLOCAL | MOD_STATIC))) {
699 struct basic_block *bb;
700 FOR_EACH_PTR(ep->bbs, bb) {
701 if (!bb->children)
702 kill_dead_stores(pseudo, ++bb_generation, bb, !mod);
703 } END_FOR_EACH_PTR(bb);
707 return;
710 void simplify_symbol_usage(struct entrypoint *ep)
712 struct symbol *sym;
714 FOR_EACH_PTR(ep->accesses, sym) {
715 simplify_one_symbol(ep, sym);
716 } END_FOR_EACH_PTR(sym);
719 static void mark_bb_reachable(struct basic_block *bb, unsigned long generation)
721 struct basic_block *child;
723 if (bb->generation == generation)
724 return;
725 bb->generation = generation;
726 FOR_EACH_PTR(bb->children, child) {
727 mark_bb_reachable(child, generation);
728 } END_FOR_EACH_PTR(child);
731 static void kill_defs(struct instruction *insn)
733 pseudo_t target = insn->target;
735 if (!has_use_list(target))
736 return;
737 if (target->def != insn)
738 return;
740 convert_instruction_target(insn, VOID);
743 void kill_bb(struct basic_block *bb)
745 struct instruction *insn;
746 struct basic_block *child, *parent;
748 FOR_EACH_PTR(bb->insns, insn) {
749 kill_instruction(insn);
750 kill_defs(insn);
752 * We kill unreachable instructions even if they
753 * otherwise aren't "killable". Eg volatile loads
754 * etc.
756 insn->bb = NULL;
757 } END_FOR_EACH_PTR(insn);
758 bb->insns = NULL;
760 FOR_EACH_PTR(bb->children, child) {
761 remove_bb_from_list(&child->parents, bb, 0);
762 } END_FOR_EACH_PTR(child);
763 bb->children = NULL;
765 FOR_EACH_PTR(bb->parents, parent) {
766 remove_bb_from_list(&parent->children, bb, 0);
767 } END_FOR_EACH_PTR(parent);
768 bb->parents = NULL;
771 void kill_unreachable_bbs(struct entrypoint *ep)
773 struct basic_block *bb;
774 unsigned long generation = ++bb_generation;
776 mark_bb_reachable(ep->entry->bb, generation);
777 FOR_EACH_PTR(ep->bbs, bb) {
778 if (bb->generation == generation)
779 continue;
780 /* Mark it as being dead */
781 kill_bb(bb);
782 bb->ep = NULL;
783 DELETE_CURRENT_PTR(bb);
784 } END_FOR_EACH_PTR(bb);
785 PACK_PTR_LIST(&ep->bbs);
788 static int rewrite_parent_branch(struct basic_block *bb, struct basic_block *old, struct basic_block *new)
790 int changed = 0;
791 struct instruction *insn = last_instruction(bb->insns);
793 if (!insn)
794 return 0;
796 /* Infinite loops: let's not "optimize" them.. */
797 if (old == new)
798 return 0;
800 switch (insn->opcode) {
801 case OP_BR:
802 changed |= rewrite_branch(bb, &insn->bb_true, old, new);
803 changed |= rewrite_branch(bb, &insn->bb_false, old, new);
804 assert(changed);
805 return changed;
806 case OP_SWITCH: {
807 struct multijmp *jmp;
808 FOR_EACH_PTR(insn->multijmp_list, jmp) {
809 changed |= rewrite_branch(bb, &jmp->target, old, new);
810 } END_FOR_EACH_PTR(jmp);
811 assert(changed);
812 return changed;
814 default:
815 return 0;
819 static struct basic_block * rewrite_branch_bb(struct basic_block *bb, struct instruction *br)
821 struct basic_block *parent;
822 struct basic_block *target = br->bb_true;
823 struct basic_block *false = br->bb_false;
825 if (target && false) {
826 pseudo_t cond = br->cond;
827 if (cond->type != PSEUDO_VAL)
828 return NULL;
829 target = cond->value ? target : false;
833 * We can't do FOR_EACH_PTR() here, because the parent list
834 * may change when we rewrite the parent.
836 while ((parent = first_basic_block(bb->parents)) != NULL) {
837 if (!rewrite_parent_branch(parent, bb, target))
838 return NULL;
840 return target;
843 static void vrfy_bb_in_list(struct basic_block *bb, struct basic_block_list *list)
845 if (bb) {
846 struct basic_block *tmp;
847 int no_bb_in_list = 0;
849 FOR_EACH_PTR(list, tmp) {
850 if (bb == tmp)
851 return;
852 } END_FOR_EACH_PTR(tmp);
853 assert(no_bb_in_list);
857 static void vrfy_parents(struct basic_block *bb)
859 struct basic_block *tmp;
860 FOR_EACH_PTR(bb->parents, tmp) {
861 vrfy_bb_in_list(bb, tmp->children);
862 } END_FOR_EACH_PTR(tmp);
865 static void vrfy_children(struct basic_block *bb)
867 struct basic_block *tmp;
868 struct instruction *br = last_instruction(bb->insns);
870 if (!br) {
871 assert(!bb->children);
872 return;
874 switch (br->opcode) {
875 struct multijmp *jmp;
876 case OP_BR:
877 vrfy_bb_in_list(br->bb_true, bb->children);
878 vrfy_bb_in_list(br->bb_false, bb->children);
879 break;
880 case OP_SWITCH:
881 case OP_COMPUTEDGOTO:
882 FOR_EACH_PTR(br->multijmp_list, jmp) {
883 vrfy_bb_in_list(jmp->target, bb->children);
884 } END_FOR_EACH_PTR(jmp);
885 break;
886 default:
887 break;
890 FOR_EACH_PTR(bb->children, tmp) {
891 vrfy_bb_in_list(bb, tmp->parents);
892 } END_FOR_EACH_PTR(tmp);
/* Verify both edge directions for one basic block */
static void vrfy_bb_flow(struct basic_block *bb)
{
	vrfy_children(bb);
	vrfy_parents(bb);
}
901 void vrfy_flow(struct entrypoint *ep)
903 struct basic_block *bb;
904 struct basic_block *entry = ep->entry->bb;
906 FOR_EACH_PTR(ep->bbs, bb) {
907 if (bb == entry)
908 entry = NULL;
909 vrfy_bb_flow(bb);
910 } END_FOR_EACH_PTR(bb);
911 assert(!entry);
914 void pack_basic_blocks(struct entrypoint *ep)
916 struct basic_block *bb;
918 /* See if we can merge a bb into another one.. */
919 FOR_EACH_PTR(ep->bbs, bb) {
920 struct instruction *first, *insn;
921 struct basic_block *parent, *child, *last;
923 if (!bb_reachable(bb))
924 continue;
927 * Just a branch?
929 FOR_EACH_PTR(bb->insns, first) {
930 if (!first->bb)
931 continue;
932 switch (first->opcode) {
933 case OP_NOP: case OP_LNOP: case OP_SNOP:
934 continue;
935 case OP_BR: {
936 struct basic_block *replace;
937 replace = rewrite_branch_bb(bb, first);
938 if (replace) {
939 kill_bb(bb);
940 goto no_merge;
943 /* fallthrough */
944 default:
945 goto out;
947 } END_FOR_EACH_PTR(first);
949 out:
951 * See if we only have one parent..
953 last = NULL;
954 FOR_EACH_PTR(bb->parents, parent) {
955 if (last) {
956 if (last != parent)
957 goto no_merge;
958 continue;
960 last = parent;
961 } END_FOR_EACH_PTR(parent);
963 parent = last;
964 if (!parent || parent == bb)
965 continue;
968 * Goodie. See if the parent can merge..
970 FOR_EACH_PTR(parent->children, child) {
971 if (child != bb)
972 goto no_merge;
973 } END_FOR_EACH_PTR(child);
976 * Merge the two.
978 repeat_phase |= REPEAT_CSE;
980 parent->children = bb->children;
981 bb->children = NULL;
982 bb->parents = NULL;
984 FOR_EACH_PTR(parent->children, child) {
985 replace_bb_in_list(&child->parents, bb, parent, 0);
986 } END_FOR_EACH_PTR(child);
988 delete_last_instruction(&parent->insns);
989 FOR_EACH_PTR(bb->insns, insn) {
990 if (insn->bb) {
991 assert(insn->bb == bb);
992 insn->bb = parent;
994 add_instruction(&parent->insns, insn);
995 } END_FOR_EACH_PTR(insn);
996 bb->insns = NULL;
998 no_merge:
999 /* nothing to do */;
1000 } END_FOR_EACH_PTR(bb);