cgcc: filter-out '-fdump-linearize[=...]'
[smatch.git] / flow.c
blobe0932edb40513ed876faa5d9e57d82d40e23728a
1 /*
2 * Flow - walk the linearized flowgraph, simplifying it as we
3 * go along.
5 * Copyright (C) 2004 Linus Torvalds
6 */
8 #include <string.h>
9 #include <stdarg.h>
10 #include <stdlib.h>
11 #include <stdio.h>
12 #include <stddef.h>
13 #include <assert.h>
15 #include "parse.h"
16 #include "expression.h"
17 #include "linearize.h"
18 #include "flow.h"
19 #include "target.h"
/*
 * Monotonic generation counter used to mark basic blocks as visited
 * during graph walks (compared against bb->generation); callers bump
 * it with ++bb_generation before each traversal.
 */
unsigned long bb_generation;
24 * Dammit, if we have a phi-node followed by a conditional
25 * branch on that phi-node, we should damn well be able to
26 * do something about the source. Maybe.
28 static int rewrite_branch(struct basic_block *bb,
29 struct basic_block **ptr,
30 struct basic_block *old,
31 struct basic_block *new)
33 if (*ptr != old || new == old)
34 return 0;
36 /* We might find new if-conversions or non-dominating CSEs */
37 repeat_phase |= REPEAT_CSE;
38 *ptr = new;
39 replace_bb_in_list(&bb->children, old, new, 1);
40 remove_bb_from_list(&old->parents, bb, 1);
41 add_bb(&new->parents, bb);
42 return 1;
46 * Return the known truth value of a pseudo, or -1 if
47 * it's not known.
49 static int pseudo_truth_value(pseudo_t pseudo)
51 switch (pseudo->type) {
52 case PSEUDO_VAL:
53 return !!pseudo->value;
55 case PSEUDO_REG: {
56 struct instruction *insn = pseudo->def;
58 /* A symbol address is always considered true.. */
59 if (insn->opcode == OP_SYMADDR && insn->target == pseudo)
60 return 1;
62 /* Fall through */
63 default:
64 return -1;
69 * Does a basic block depend on the pseudos that "src" defines?
71 static int bb_depends_on(struct basic_block *target, struct basic_block *src)
73 pseudo_t pseudo;
75 FOR_EACH_PTR(src->defines, pseudo) {
76 if (pseudo_in_list(target->needs, pseudo))
77 return 1;
78 } END_FOR_EACH_PTR(pseudo);
79 return 0;
/*
 * Return 1 if 'pseudo' is needed in some parent of 'bb'.
 * Need liveness info.
 *
 * NOTE(review): despite the comment above, the walk actually descends
 * through 'curr's *children*, checking whether the phi target is still
 * live (in some bb's "needs" list) anywhere downstream that doesn't
 * redefine it — confirm intended direction against callers.
 */
static int needed_phisrc(struct instruction *phi, struct basic_block *curr, unsigned long generation)
{
	pseudo_t target = phi->target;
	struct basic_block *bb;

	/* Mark visited so cyclic CFGs terminate */
	curr->generation = generation;
	FOR_EACH_PTR(curr->children, bb) {
		if (bb->generation == generation)
			continue;
		/* The phi node's own block consumes the source by definition */
		if (bb == phi->bb)
			continue;
		/* A redefinition kills the old value along this path */
		if (pseudo_in_list(bb->defines, target)) {
			continue;
		}
		if (pseudo_in_list(bb->needs, target))
			return 1;
		if (needed_phisrc(phi, bb, generation))
			return 1;
	} END_FOR_EACH_PTR(bb);

	return 0;
}
/*
 * When we reach here, we have:
 *  - a basic block that ends in a conditional branch and
 *    that has no side effects apart from the pseudos it
 *    may change.
 *  - the phi-node that the conditional branch depends on
 *  - full pseudo liveness information
 *
 * We need to check if any of the _sources_ of the phi-node
 * may be constant, and not actually need this block at all.
 */
static int try_to_simplify_bb(struct basic_block *bb, struct instruction *first, struct instruction *second)
{
	int changed = 0;
	pseudo_t phi;

	FOR_EACH_PTR(first->phi_list, phi) {
		struct instruction *def = phi->def;
		struct basic_block *source, *target;
		pseudo_t pseudo;
		struct instruction *br;
		int true;

		if (!def)
			continue;
		source = def->bb;
		pseudo = def->src1;
		if (!pseudo || !source)
			continue;
		/* The source block must end in a plain or conditional branch */
		br = last_instruction(source->insns);
		if (!br)
			continue;
		if (br->opcode != OP_CBR && br->opcode != OP_BR)
			continue;
		/* Only simplify when this phi source has a known truth value */
		true = pseudo_truth_value(pseudo);
		if (true < 0)
			continue;
		/* The branch outcome is known: pick the side 'second' would take */
		target = true ? second->bb_true : second->bb_false;
		/* Can't skip 'bb' if 'target' consumes pseudos that 'bb' defines */
		if (bb_depends_on(target, bb))
			continue;
		/* Redirect the source's branch straight past 'bb' to 'target' */
		changed |= rewrite_branch(source, &br->bb_true, bb, target);
		changed |= rewrite_branch(source, &br->bb_false, bb, target);
		/* If nothing downstream still needs this phi source, drop it */
		if (changed && !needed_phisrc(first, source, ++bb_generation))
			kill_use(THIS_ADDRESS(phi));
	} END_FOR_EACH_PTR(phi);
	return changed;
}
158 static int bb_has_side_effects(struct basic_block *bb)
160 struct instruction *insn;
161 FOR_EACH_PTR(bb->insns, insn) {
162 switch (insn->opcode) {
163 case OP_CALL:
164 /* FIXME! This should take "const" etc into account */
165 return 1;
167 case OP_STORE:
168 case OP_CONTEXT:
169 return 1;
171 case OP_ASM:
172 /* FIXME! This should take "volatile" etc into account */
173 return 1;
175 default:
176 continue;
178 } END_FOR_EACH_PTR(insn);
179 return 0;
182 static int simplify_phi_branch(struct basic_block *bb, struct instruction *br)
184 pseudo_t cond = br->cond;
185 struct instruction *def;
187 if (cond->type != PSEUDO_REG)
188 return 0;
189 def = cond->def;
190 if (def->bb != bb || def->opcode != OP_PHI)
191 return 0;
192 if (bb_has_side_effects(bb))
193 return 0;
194 return try_to_simplify_bb(bb, def, br);
/*
 * Simplify a branch ('br' in 'bb') whose taken side '*target_p' leads
 * to another block that ends in a conditional branch on the *same*
 * condition.  'true' says which side of 'br' we are looking at, so the
 * second branch's outcome is already known along this edge.
 */
static int simplify_branch_branch(struct basic_block *bb, struct instruction *br,
	struct basic_block **target_p, int true)
{
	struct basic_block *target = *target_p, *final;
	struct instruction *insn;
	int retval;

	/* Self-loop: nothing sensible to rewrite */
	if (target == bb)
		return 0;
	insn = last_instruction(target->insns);
	if (!insn || insn->opcode != OP_CBR || insn->cond != br->cond)
		return 0;
	/*
	 * Ahhah! We've found a branch to a branch on the same conditional!
	 * Now we just need to see if we can rewrite the branch..
	 */
	retval = 0;
	final = true ? insn->bb_true : insn->bb_false;
	/* Can't bypass 'target' if it does real work.. */
	if (bb_has_side_effects(target))
		goto try_to_rewrite_target;
	/* ..or if 'final' consumes pseudos that 'target' defines */
	if (bb_depends_on(final, target))
		goto try_to_rewrite_target;
	return rewrite_branch(bb, target_p, target, final);

try_to_rewrite_target:
	/*
	 * If we're the only parent, at least we can rewrite the
	 * now-known second branch.
	 */
	if (bb_list_size(target->parents) != 1)
		return retval;
	insert_branch(target, insn, final);
	return 1;
}
232 static int simplify_one_branch(struct basic_block *bb, struct instruction *br)
234 if (simplify_phi_branch(bb, br))
235 return 1;
236 return simplify_branch_branch(bb, br, &br->bb_true, 1) |
237 simplify_branch_branch(bb, br, &br->bb_false, 0);
240 static int simplify_branch_nodes(struct entrypoint *ep)
242 int changed = 0;
243 struct basic_block *bb;
245 FOR_EACH_PTR(ep->bbs, bb) {
246 struct instruction *br = last_instruction(bb->insns);
248 if (!br || br->opcode != OP_CBR)
249 continue;
250 changed |= simplify_one_branch(bb, br);
251 } END_FOR_EACH_PTR(bb);
252 return changed;
/*
 * This is called late - when we have intra-bb liveness information..
 *
 * Entry point for the branch-simplification pass; returns non-zero
 * if the flowgraph changed.
 */
int simplify_flow(struct entrypoint *ep)
{
	return simplify_branch_nodes(ep);
}
/*
 * Append the users of 'src' onto the use-list '*dst'.  Thin typed
 * wrapper over the generic ptr_list concatenation.
 */
static inline void concat_user_list(struct pseudo_user_list *src, struct pseudo_user_list **dst)
{
	concat_ptr_list((struct ptr_list *)src, (struct ptr_list **)dst);
}
/*
 * Replace every use of 'insn's target pseudo with 'src', and hand the
 * old target's user list over to 'src'.  After this the old target
 * pseudo has no users.
 */
void convert_instruction_target(struct instruction *insn, pseudo_t src)
{
	pseudo_t target;
	struct pseudo_user *pu;
	/*
	 * Go through the "insn->users" list and replace them all..
	 */
	target = insn->target;
	if (target == src)
		return;
	FOR_EACH_PTR(target->users, pu) {
		/* VOID entries are dead uses - leave them alone */
		if (*pu->userp != VOID) {
			assert(*pu->userp == target);
			*pu->userp = src;
		}
	} END_FOR_EACH_PTR(pu);
	/* Transfer the use list only if 'src' tracks users at all */
	if (has_use_list(src))
		concat_user_list(target->users, &src->users);
	target->users = NULL;
}
/*
 * Replace all uses of the load's result with 'src', then neuter the
 * load itself.
 */
void convert_load_instruction(struct instruction *insn, pseudo_t src)
{
	convert_instruction_target(insn, src);
	/* Turn the load into a no-op */
	insn->opcode = OP_LNOP;
	insn->bb = NULL;
}
297 static int overlapping_memop(struct instruction *a, struct instruction *b)
299 unsigned int a_start = bytes_to_bits(a->offset);
300 unsigned int b_start = bytes_to_bits(b->offset);
301 unsigned int a_size = a->size;
302 unsigned int b_size = b->size;
304 if (a_size + a_start <= b_start)
305 return 0;
306 if (b_size + b_start <= a_start)
307 return 0;
308 return 1;
/* Do the two accesses cover exactly the same offset and width? */
static inline int same_memop(struct instruction *a, struct instruction *b)
{
	return a->offset == b->offset && a->size == b->size;
}
316 static inline int distinct_symbols(pseudo_t a, pseudo_t b)
318 if (a->type != PSEUDO_SYM)
319 return 0;
320 if (b->type != PSEUDO_SYM)
321 return 0;
322 return a->sym != b->sym;
/*
 * Return 1 if "dom" dominates the access to "pseudo"
 * in "insn".
 *
 * Return 0 if it doesn't, and -1 if you don't know.
 *
 * 'local' means the symbol has no external visibility, so calls and
 * unrelated accesses cannot touch it.
 */
int dominates(pseudo_t pseudo, struct instruction *insn, struct instruction *dom, int local)
{
	int opcode = dom->opcode;

	/* A call (or function entry) may clobber anything non-local */
	if (opcode == OP_CALL || opcode == OP_ENTRY)
		return local ? 0 : -1;
	/* Only memory operations can dominate a memory access */
	if (opcode != OP_LOAD && opcode != OP_STORE)
		return 0;
	if (dom->src != pseudo) {
		if (local)
			return 0;
		/* We don't think two explicitly different symbols ever alias */
		if (distinct_symbols(insn->src, dom->src))
			return 0;
		/* We could try to do some alias analysis here */
		return -1;
	}
	if (!same_memop(insn, dom)) {
		/* A load of a different range tells us nothing */
		if (dom->opcode == OP_LOAD)
			return 0;
		if (!overlapping_memop(insn, dom))
			return 0;
		/* Partial overlap with a store: can't decide */
		return -1;
	}
	return 1;
}
358 static int phisrc_in_bb(struct pseudo_list *list, struct basic_block *bb)
360 pseudo_t p;
361 FOR_EACH_PTR(list, p) {
362 if (p->def->bb == bb)
363 return 1;
364 } END_FOR_EACH_PTR(p);
366 return 0;
/*
 * Walk up through the parents of 'bb' looking, in each parent, for a
 * memory operation that dominates the load 'insn'.  For every parent
 * where one is found, a phi source is planted just before the parent's
 * final branch and collected into '*dominators'.  Parents with no
 * dominator are searched recursively (generation-marked to terminate
 * on cycles).  Returns 0 if domination can't be established.
 */
static int find_dominating_parents(pseudo_t pseudo, struct instruction *insn,
	struct basic_block *bb, unsigned long generation, struct pseudo_list **dominators,
	int local)
{
	struct basic_block *parent;

	/* Reached the entry with no store seen: OK only for local symbols */
	if (!bb->parents)
		return !!local;

	FOR_EACH_PTR(bb->parents, parent) {
		struct instruction *one;
		struct instruction *br;
		pseudo_t phi;

		/* Scan the parent backwards for the nearest dominating memop */
		FOR_EACH_PTR_REVERSE(parent->insns, one) {
			int dominance;
			if (one == insn)
				goto no_dominance;
			dominance = dominates(pseudo, insn, one, local);
			if (dominance < 0) {
				/* Ignore ambiguous loads; anything else kills the search */
				if (one->opcode == OP_LOAD)
					continue;
				return 0;
			}
			if (!dominance)
				continue;
			goto found_dominator;
		} END_FOR_EACH_PTR_REVERSE(one);
no_dominance:
		if (parent->generation == generation)
			continue;
		parent->generation = generation;

		if (!find_dominating_parents(pseudo, insn, parent, generation, dominators, local))
			return 0;
		continue;

found_dominator:
		/* Only one phi source per dominating block */
		if (dominators && phisrc_in_bb(*dominators, parent))
			continue;
		/* Insert the phi source just before the parent's final branch */
		br = delete_last_instruction(&parent->insns);
		phi = alloc_phi(parent, one->target, one->size);
		phi->ident = phi->ident ? : pseudo->ident;
		add_instruction(&parent->insns, br);
		use_pseudo(insn, phi, add_pseudo(dominators, phi));
	} END_FOR_EACH_PTR(parent);
	return 1;
}
/*
 * We should probably sort the phi list just to make it easier to compare
 * later for equality.
 *
 * Turn a dominated load either into a direct use of the single common
 * value (when every phi source carries the same pseudo) or into a real
 * OP_PHI over 'dominators'.
 */
void rewrite_load_instruction(struct instruction *insn, struct pseudo_list *dominators)
{
	pseudo_t new, phi;

	/*
	 * Check for somewhat common case of duplicate
	 * phi nodes.
	 */
	new = first_pseudo(dominators)->def->src1;
	FOR_EACH_PTR(dominators, phi) {
		if (new != phi->def->src1)
			goto complex_phi;
		new->ident = new->ident ? : phi->ident;
	} END_FOR_EACH_PTR(phi);

	/*
	 * All the same pseudo - mark the phi-nodes unused
	 * and convert the load into a LNOP and replace the
	 * pseudo.
	 */
	FOR_EACH_PTR(dominators, phi) {
		kill_instruction(phi->def);
	} END_FOR_EACH_PTR(phi);
	convert_load_instruction(insn, new);
	return;

complex_phi:
	/* We leave symbol pseudos with a bogus usage list here */
	if (insn->src->type != PSEUDO_SYM)
		kill_use(&insn->src);
	insn->opcode = OP_PHI;
	insn->phi_list = dominators;
}
/*
 * Try to replace the load 'insn' with the value of a dominating store.
 * First scan backwards within the load's own block; if no unambiguous
 * dominator exists there, recurse through the parents collecting phi
 * sources.  Returns 1 if the load was converted, 0 otherwise.
 */
static int find_dominating_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, int local)
{
	struct basic_block *bb = insn->bb;
	struct instruction *one, *dom = NULL;
	struct pseudo_list *dominators;
	int partial;

	/* Unreachable load? Undo it */
	if (!bb) {
		insn->opcode = OP_LNOP;
		return 1;
	}

	partial = 0;
	FOR_EACH_PTR(bb->insns, one) {
		int dominance;
		if (one == insn)
			goto found;
		dominance = dominates(pseudo, insn, one, local);
		if (dominance < 0) {
			/* Ignore partial load dominators */
			if (one->opcode == OP_LOAD)
				continue;
			/* Ambiguous store: forget any earlier dominator */
			dom = NULL;
			partial = 1;
			continue;
		}
		if (!dominance)
			continue;
		/* Track the most recent clean dominator */
		dom = one;
		partial = 0;
	} END_FOR_EACH_PTR(one);
	/* Whaa? */
	warning(pseudo->sym->pos, "unable to find symbol read");
	return 0;
found:
	if (partial)
		return 0;

	if (dom) {
		convert_load_instruction(insn, dom->target);
		return 1;
	}

	/* OK, go find the parents */
	bb->generation = generation;

	dominators = NULL;
	if (!find_dominating_parents(pseudo, insn, bb, generation, &dominators, local))
		return 0;

	/* This happens with initial assignments to structures etc.. */
	if (!dominators) {
		if (!local)
			return 0;
		check_access(insn);
		/* Uninitialized local read: fold to zero */
		convert_load_instruction(insn, value_pseudo(0));
		return 1;
	}

	/*
	 * If we find just one dominating instruction, we
	 * can turn it into a direct thing. Otherwise we'll
	 * have to turn the load into a phi-node of the
	 * dominators.
	 */
	rewrite_load_instruction(insn, dominators);
	return 1;
}
527 static void kill_store(struct instruction *insn)
529 if (insn) {
530 insn->bb = NULL;
531 insn->opcode = OP_SNOP;
532 kill_use(&insn->target);
/* Kill a pseudo that is dead on exit from the bb */
static void kill_dead_stores(pseudo_t pseudo, unsigned long generation, struct basic_block *bb, int local)
{
	struct instruction *insn;
	struct basic_block *parent;

	if (bb->generation == generation)
		return;
	bb->generation = generation;
	/* Walk backwards: a store is dead until something reads the pseudo */
	FOR_EACH_PTR_REVERSE(bb->insns, insn) {
		int opcode = insn->opcode;

		if (opcode != OP_LOAD && opcode != OP_STORE) {
			if (local)
				continue;
			/* A call might observe a non-local pseudo: stop here */
			if (opcode == OP_CALL)
				return;
			continue;
		}
		if (insn->src == pseudo) {
			/* A load revives the value - earlier stores are live */
			if (opcode == OP_LOAD)
				return;
			kill_store(insn);
			continue;
		}
		if (local)
			continue;
		/* Unknown (non-symbol) access might alias: give up */
		if (insn->src->type != PSEUDO_SYM)
			return;
	} END_FOR_EACH_PTR_REVERSE(insn);

	/* Recurse into parents, but only those with no other live child */
	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dead_stores(pseudo, generation, parent, local);
	} END_FOR_EACH_PTR(parent);
}
/*
 * This should see if the "insn" trivially dominates some previous store, and kill the
 * store if unnecessary.
 */
static void kill_dominated_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, struct basic_block *bb, int local, int found)
{
	struct instruction *one;
	struct basic_block *parent;

	/* Unreachable store? Undo it */
	if (!bb) {
		kill_store(insn);
		return;
	}
	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR_REVERSE(bb->insns, one) {
		int dominance;
		/* First locate 'insn' itself; only instructions before it count */
		if (!found) {
			if (one != insn)
				continue;
			found = 1;
			continue;
		}
		dominance = dominates(pseudo, insn, one, local);
		if (!dominance)
			continue;
		/* Ambiguous aliasing, or an intervening load: stop */
		if (dominance < 0)
			return;
		if (one->opcode == OP_LOAD)
			return;
		/* A fully-shadowed earlier store is dead */
		kill_store(one);
	} END_FOR_EACH_PTR_REVERSE(one);

	if (!found) {
		warning(bb->pos, "Unable to find instruction");
		return;
	}

	/* Recurse into parents whose only live child is this block */
	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dominated_stores(pseudo, insn, generation, parent, local, found);
	} END_FOR_EACH_PTR(parent);
}
628 void check_access(struct instruction *insn)
630 pseudo_t pseudo = insn->src;
632 if (insn->bb && pseudo->type == PSEUDO_SYM) {
633 int offset = insn->offset, bit = bytes_to_bits(offset) + insn->size;
634 struct symbol *sym = pseudo->sym;
636 if (sym->bit_size > 0 && (offset < 0 || bit > sym->bit_size))
637 warning(insn->pos, "invalid access %s '%s' (%d %d)",
638 offset < 0 ? "below" : "past the end of",
639 show_ident(sym->ident), offset,
640 bits_to_bytes(sym->bit_size));
/*
 * Simplify all loads/stores of one symbol.  For a plain local symbol
 * with at most one store and only whole-object accesses, every load
 * is replaced by the stored value directly.  Otherwise fall back to
 * per-load store-domination analysis and dead-store elimination.
 */
static void simplify_one_symbol(struct entrypoint *ep, struct symbol *sym)
{
	pseudo_t pseudo, src;
	struct pseudo_user *pu;
	struct instruction *def;
	unsigned long mod;
	int all, stores, complex;

	/* Never used as a symbol? */
	pseudo = sym->pseudo;
	if (!pseudo)
		return;

	/* We don't do coverage analysis of volatiles.. */
	if (sym->ctype.modifiers & MOD_VOLATILE)
		return;

	/* ..and symbols with external visibility need more care */
	mod = sym->ctype.modifiers & (MOD_NONLOCAL | MOD_STATIC | MOD_ADDRESSABLE);
	if (mod)
		goto external_visibility;

	def = NULL;
	stores = 0;
	complex = 0;
	FOR_EACH_PTR(pseudo->users, pu) {
		/* We know that the symbol-pseudo use is the "src" in the instruction */
		struct instruction *insn = pu->insn;

		switch (insn->opcode) {
		case OP_STORE:
			stores++;
			def = insn;
			break;
		case OP_LOAD:
			break;
		case OP_SYMADDR:
			if (!insn->bb)
				continue;
			/* Taking the address makes the symbol visible elsewhere */
			mod |= MOD_ADDRESSABLE;
			goto external_visibility;
		case OP_NOP:
		case OP_SNOP:
		case OP_LNOP:
		case OP_PHI:
			continue;
		default:
			warning(sym->pos, "symbol '%s' pseudo used in unexpected way", show_ident(sym->ident));
		}
		/* Any non-zero offset means a partial (complex) access */
		complex |= insn->offset;
	} END_FOR_EACH_PTR(pu);

	if (complex)
		goto complex_def;
	if (stores > 1)
		goto multi_def;

	/*
	 * Goodie, we have a single store (if even that) in the whole
	 * thing. Replace all loads with moves from the pseudo,
	 * replace the store with a def.
	 */
	src = VOID;
	if (def)
		src = def->target;

	FOR_EACH_PTR(pseudo->users, pu) {
		struct instruction *insn = pu->insn;
		if (insn->opcode == OP_LOAD) {
			check_access(insn);
			convert_load_instruction(insn, src);
		}
	} END_FOR_EACH_PTR(pu);

	/* Turn the store into a no-op */
	kill_store(def);
	return;

multi_def:
complex_def:
external_visibility:
	/* Try to convert each load via its dominating stores */
	all = 1;
	FOR_EACH_PTR_REVERSE(pseudo->users, pu) {
		struct instruction *insn = pu->insn;
		if (insn->opcode == OP_LOAD)
			all &= find_dominating_stores(pseudo, insn, ++bb_generation, !mod);
	} END_FOR_EACH_PTR_REVERSE(pu);

	/* If we converted all the loads, remove the stores. They are dead */
	if (all && !mod) {
		FOR_EACH_PTR(pseudo->users, pu) {
			struct instruction *insn = pu->insn;
			if (insn->opcode == OP_STORE)
				kill_store(insn);
		} END_FOR_EACH_PTR(pu);
	} else {
		/*
		 * If we couldn't take the shortcut, see if we can at least kill some
		 * of them..
		 */
		FOR_EACH_PTR(pseudo->users, pu) {
			struct instruction *insn = pu->insn;
			if (insn->opcode == OP_STORE)
				kill_dominated_stores(pseudo, insn, ++bb_generation, insn->bb, !mod, 0);
		} END_FOR_EACH_PTR(pu);

		/* Stores dead at function exit can go too (not for static/global) */
		if (!(mod & (MOD_NONLOCAL | MOD_STATIC))) {
			struct basic_block *bb;
			FOR_EACH_PTR(ep->bbs, bb) {
				if (!bb->children)
					kill_dead_stores(pseudo, ++bb_generation, bb, !mod);
			} END_FOR_EACH_PTR(bb);
		}
	}
	return;
}
/*
 * Run symbol simplification over every symbol access recorded for
 * this entrypoint.
 */
void simplify_symbol_usage(struct entrypoint *ep)
{
	pseudo_t pseudo;

	FOR_EACH_PTR(ep->accesses, pseudo) {
		simplify_one_symbol(ep, pseudo->sym);
	} END_FOR_EACH_PTR(pseudo);
}
/*
 * Depth-first mark of every block reachable from 'bb', using
 * 'generation' as the visited stamp.
 */
static void mark_bb_reachable(struct basic_block *bb, unsigned long generation)
{
	struct basic_block *child;

	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR(bb->children, child) {
		mark_bb_reachable(child, generation);
	} END_FOR_EACH_PTR(child);
}
783 static void kill_defs(struct instruction *insn)
785 pseudo_t target = insn->target;
787 if (!has_use_list(target))
788 return;
789 if (target->def != insn)
790 return;
792 convert_instruction_target(insn, VOID);
/*
 * Remove a basic block completely: force-kill its instructions,
 * void out their definitions, and unlink it from both its children's
 * parent lists and its parents' child lists.
 */
void kill_bb(struct basic_block *bb)
{
	struct instruction *insn;
	struct basic_block *child, *parent;

	FOR_EACH_PTR(bb->insns, insn) {
		kill_instruction_force(insn);
		kill_defs(insn);
		/*
		 * We kill unreachable instructions even if they
		 * otherwise aren't "killable" (e.g. volatile loads)
		 */
	} END_FOR_EACH_PTR(insn);
	bb->insns = NULL;

	FOR_EACH_PTR(bb->children, child) {
		remove_bb_from_list(&child->parents, bb, 0);
	} END_FOR_EACH_PTR(child);
	bb->children = NULL;

	FOR_EACH_PTR(bb->parents, parent) {
		remove_bb_from_list(&parent->children, bb, 0);
	} END_FOR_EACH_PTR(parent);
	bb->parents = NULL;
}
/*
 * Delete every basic block not reachable from the entrypoint, and
 * compact the bb list.  Clears the REPEAT_CFG_CLEANUP request bit.
 */
void kill_unreachable_bbs(struct entrypoint *ep)
{
	struct basic_block *bb;
	unsigned long generation = ++bb_generation;

	mark_bb_reachable(ep->entry->bb, generation);
	FOR_EACH_PTR(ep->bbs, bb) {
		if (bb->generation == generation)
			continue;
		/* Mark it as being dead */
		kill_bb(bb);
		bb->ep = NULL;
		DELETE_CURRENT_PTR(bb);
	} END_FOR_EACH_PTR(bb);
	PACK_PTR_LIST(&ep->bbs);

	repeat_phase &= ~REPEAT_CFG_CLEANUP;
}
/*
 * Rewrite whatever terminator 'bb' has (branch, conditional branch,
 * or switch) so that edges pointing at 'old' point at 'new' instead.
 * Returns non-zero on success; refuses to touch self-loops.
 */
static int rewrite_parent_branch(struct basic_block *bb, struct basic_block *old, struct basic_block *new)
{
	int changed = 0;
	struct instruction *insn = last_instruction(bb->insns);

	if (!insn)
		return 0;

	/* Infinite loops: let's not "optimize" them.. */
	if (old == new)
		return 0;

	switch (insn->opcode) {
	case OP_CBR:
		changed |= rewrite_branch(bb, &insn->bb_false, old, new);
		/* fall through */
	case OP_BR:
		changed |= rewrite_branch(bb, &insn->bb_true, old, new);
		/* The caller told us this edge exists, so something must change */
		assert(changed);
		return changed;
	case OP_SWITCH: {
		struct multijmp *jmp;
		FOR_EACH_PTR(insn->multijmp_list, jmp) {
			changed |= rewrite_branch(bb, &jmp->target, old, new);
		} END_FOR_EACH_PTR(jmp);
		assert(changed);
		return changed;
	}
	default:
		return 0;
	}
}
/*
 * 'bb' contains nothing but a (possibly conditional) branch 'br'.
 * Determine its single real successor and retarget every parent
 * straight at it.  Returns the bypass target, or NULL if the bypass
 * couldn't be completed.
 */
static struct basic_block * rewrite_branch_bb(struct basic_block *bb, struct instruction *br)
{
	struct basic_block *parent;
	struct basic_block *target = br->bb_true;
	struct basic_block *false = br->bb_false;

	if (br->opcode == OP_CBR) {
		pseudo_t cond = br->cond;
		/* A conditional branch only collapses if its condition is constant */
		if (cond->type != PSEUDO_VAL)
			return NULL;
		target = cond->value ? target : false;
	}

	/*
	 * We can't do FOR_EACH_PTR() here, because the parent list
	 * may change when we rewrite the parent.
	 */
	while ((parent = first_basic_block(bb->parents)) != NULL) {
		if (!rewrite_parent_branch(parent, bb, target))
			return NULL;
	}
	return target;
}
897 static void vrfy_bb_in_list(struct basic_block *bb, struct basic_block_list *list)
899 if (bb) {
900 struct basic_block *tmp;
901 int no_bb_in_list = 0;
903 FOR_EACH_PTR(list, tmp) {
904 if (bb == tmp)
905 return;
906 } END_FOR_EACH_PTR(tmp);
907 assert(no_bb_in_list);
/* Verify that every parent of 'bb' lists 'bb' among its children */
static void vrfy_parents(struct basic_block *bb)
{
	struct basic_block *tmp;
	FOR_EACH_PTR(bb->parents, tmp) {
		vrfy_bb_in_list(bb, tmp->children);
	} END_FOR_EACH_PTR(tmp);
}
/*
 * Verify that every branch target of 'bb's terminator appears in its
 * child list, and that every child lists 'bb' as a parent.
 */
static void vrfy_children(struct basic_block *bb)
{
	struct basic_block *tmp;
	struct instruction *br = last_instruction(bb->insns);

	if (!br) {
		/* A block with no terminator must have no successors */
		assert(!bb->children);
		return;
	}
	switch (br->opcode) {
		struct multijmp *jmp;
	case OP_CBR:
		vrfy_bb_in_list(br->bb_false, bb->children);
		/* fall through */
	case OP_BR:
		vrfy_bb_in_list(br->bb_true, bb->children);
		break;
	case OP_SWITCH:
	case OP_COMPUTEDGOTO:
		FOR_EACH_PTR(br->multijmp_list, jmp) {
			vrfy_bb_in_list(jmp->target, bb->children);
		} END_FOR_EACH_PTR(jmp);
		break;
	default:
		break;
	}

	FOR_EACH_PTR(bb->children, tmp) {
		vrfy_bb_in_list(bb, tmp->parents);
	} END_FOR_EACH_PTR(tmp);
}
/* Verify both directions of 'bb's CFG edges */
static void vrfy_bb_flow(struct basic_block *bb)
{
	vrfy_children(bb);
	vrfy_parents(bb);
}
/*
 * Sanity-check the whole flowgraph: every block's edges must be
 * consistent, and the entry block must be on the bb list.
 */
void vrfy_flow(struct entrypoint *ep)
{
	struct basic_block *bb;
	struct basic_block *entry = ep->entry->bb;

	FOR_EACH_PTR(ep->bbs, bb) {
		if (bb == entry)
			entry = NULL;
		vrfy_bb_flow(bb);
	} END_FOR_EACH_PTR(bb);
	/* 'entry' is cleared only if it was seen on the list */
	assert(!entry);
}
/*
 * Compact the flowgraph: bypass blocks that are nothing but a branch,
 * and merge each block into its parent when the parent is its only
 * predecessor and it is the parent's only successor.
 */
void pack_basic_blocks(struct entrypoint *ep)
{
	struct basic_block *bb;

	/* See if we can merge a bb into another one.. */
	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *first, *insn;
		struct basic_block *parent, *child, *last;

		if (!bb_reachable(bb))
			continue;

		/*
		 * Just a branch?
		 */
		FOR_EACH_PTR(bb->insns, first) {
			if (!first->bb)
				continue;
			switch (first->opcode) {
			case OP_NOP: case OP_LNOP: case OP_SNOP:
				continue;
			case OP_CBR:
			case OP_BR: {
				struct basic_block *replace;
				replace = rewrite_branch_bb(bb, first);
				if (replace) {
					/* All parents bypassed: the block is dead */
					kill_bb(bb);
					goto no_merge;
				}
			}
				/* fallthrough */
			default:
				goto out;
			}
		} END_FOR_EACH_PTR(first);

out:
		/*
		 * See if we only have one parent..
		 */
		last = NULL;
		FOR_EACH_PTR(bb->parents, parent) {
			if (last) {
				if (last != parent)
					goto no_merge;
				continue;
			}
			last = parent;
		} END_FOR_EACH_PTR(parent);

		parent = last;
		if (!parent || parent == bb)
			continue;

		/*
		 * Goodie. See if the parent can merge..
		 */
		FOR_EACH_PTR(parent->children, child) {
			if (child != bb)
				goto no_merge;
		} END_FOR_EACH_PTR(child);

		/*
		 * Merge the two.
		 */
		repeat_phase |= REPEAT_CSE;

		/* The parent inherits this block's successors */
		parent->children = bb->children;
		bb->children = NULL;
		bb->parents = NULL;

		FOR_EACH_PTR(parent->children, child) {
			replace_bb_in_list(&child->parents, bb, parent, 0);
		} END_FOR_EACH_PTR(child);

		/* Drop the parent's branch into us, then append our instructions */
		kill_instruction(delete_last_instruction(&parent->insns));
		FOR_EACH_PTR(bb->insns, insn) {
			if (insn->bb) {
				assert(insn->bb == bb);
				insn->bb = parent;
			}
			add_instruction(&parent->insns, insn);
		} END_FOR_EACH_PTR(insn);
		bb->insns = NULL;

	no_merge:
		/* nothing to do */;
	} END_FOR_EACH_PTR(bb);
}