/*
 * Flow - walk the linearized flowgraph, simplifying it as we
 * go along.
 *
 * Copyright (C) 2004 Linus Torvalds
 */

#include <string.h>
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <stddef.h>
#include <assert.h>

#include "parse.h"
#include "expression.h"
#include "linearize.h"
#include "flow.h"
#include "target.h"
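
/*
 * Generation counter used by the CFG walks below to mark
 * basic blocks as already visited.
 */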
unsigned long bb_generation;

/*
 * Dammit, if we have a phi-node followed by a conditional
 * branch on that phi-node, we should damn well be able to
 * do something about the source. Maybe.
 */
static int rewrite_branch(struct basic_block *bb,
	struct basic_block **ptr,
	struct basic_block *old,
	struct basic_block *new)
{
	if (*ptr != old || new == old)
		return 0;

	/* We might find new if-conversions or non-dominating CSEs */
	repeat_phase |= REPEAT_CSE;
	*ptr = new;
	replace_bb_in_list(&bb->children, old, new, 1);
	remove_bb_from_list(&old->parents, bb, 1);
	add_bb(&new->parents, bb);
	return 1;
}

/*
 * Return the known truth value of a pseudo, or -1 if
 * it's not known.
 */
static int pseudo_truth_value(pseudo_t pseudo)
{
	switch (pseudo->type) {
	case PSEUDO_VAL:
		return !!pseudo->value;

	case PSEUDO_REG: {
		struct instruction *insn = pseudo->def;

		/* A symbol address is always considered true.. */
		if (insn->opcode == OP_SYMADDR && insn->target == pseudo)
			return 1;
	}
		/* Fall through */
	default:
		return -1;
	}
}

/*
 * Does a basic block depend on the pseudos that "src" defines?
 */
static int bb_depends_on(struct basic_block *target, struct basic_block *src)
{
	pseudo_t pseudo;

	FOR_EACH_PTR(src->defines, pseudo) {
		if (pseudo_in_list(target->needs, pseudo))
			return 1;
	} END_FOR_EACH_PTR(pseudo);
	return 0;
}

/*
 * This is only to be used by try_to_simplify_bb().
 * It really should be handled by bb_depends_on(), only
 * that there is no liveness done on OP_PHI/OP_PHISRC.
 * So we consider for now that if there is an OP_PHI
 * then some block in fact depends on this one.
 * The OP_PHI controlling the conditional branch of this bb
 * is excluded since the branch will be removed.
 */
static int bb_defines_phi(struct basic_block *bb, struct instruction *def)
{
	struct instruction *insn;
	FOR_EACH_PTR(bb->insns, insn) {
		switch (insn->opcode) {
		case OP_PHI:
			if (def && insn != def)
				return 1;
			continue;
		default:
			continue;
		}
	} END_FOR_EACH_PTR(insn);
	return 0;
}

/*
 * When we reach here, we have:
 *  - a basic block that ends in a conditional branch and
 *    that has no side effects apart from the pseudos it
 *    may change.
 *  - the phi-node that the conditional branch depends on
 *  - full pseudo liveness information
 *
 * We need to check if any of the _sources_ of the phi-node
 * may be constant, and not actually need this block at all.
 */
static int try_to_simplify_bb(struct basic_block *bb, struct instruction *first, struct instruction *second)
{
	int changed = 0;
	pseudo_t phi;
	int bogus;

	/*
	 * This is due to improper dominance tracking during
	 * simplify_symbol_usage()/conversion to SSA form.
	 * No sane simplification can be done when we have this.
	 */
	bogus = bb_list_size(bb->parents) != pseudo_list_size(first->phi_list);

	FOR_EACH_PTR(first->phi_list, phi) {
		struct instruction *def = phi->def;
		struct basic_block *source, *target;
		pseudo_t pseudo;
		struct instruction *br;
		int true;

		if (!def)
			continue;
		source = def->bb;
		pseudo = def->src1;
		if (!pseudo || !source)
			continue;
		br = last_instruction(source->insns);
		if (!br)
			continue;
		if (br->opcode != OP_CBR && br->opcode != OP_BR)
			continue;
		true = pseudo_truth_value(pseudo);
		if (true < 0)
			continue;
		target = true ? second->bb_true : second->bb_false;
		if (bb_depends_on(target, bb))
			continue;
		if (bb_defines_phi(bb, first))
			continue;
		changed |= rewrite_branch(source, &br->bb_true, bb, target);
		changed |= rewrite_branch(source, &br->bb_false, bb, target);
		if (changed && !bogus)
			kill_use(THIS_ADDRESS(phi));
	} END_FOR_EACH_PTR(phi);
	return changed;
}
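
/*
 * Conservatively check whether a basic block has side effects beyond
 * the pseudos it defines: calls, stores, context changes and asm
 * statements all count as side effects here.
 */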
static int bb_has_side_effects(struct basic_block *bb)
{
	struct instruction *insn;
	FOR_EACH_PTR(bb->insns, insn) {
		switch (insn->opcode) {
		case OP_CALL:
			/* FIXME! This should take "const" etc into account */
			return 1;

		case OP_STORE:
		case OP_CONTEXT:
			return 1;

		case OP_ASM:
			/* FIXME! This should take "volatile" etc into account */
			return 1;

		default:
			continue;
		}
	} END_FOR_EACH_PTR(insn);
	return 0;
}
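
/*
 * If the conditional branch at the end of "bb" tests a phi-node that
 * is defined in this very block, try to simplify away the block for
 * those phi sources whose truth value is already known.
 */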
static int simplify_phi_branch(struct basic_block *bb, struct instruction *br)
{
	pseudo_t cond = br->cond;
	struct instruction *def;

	if (cond->type != PSEUDO_REG)
		return 0;
	def = cond->def;
	if (def->bb != bb || def->opcode != OP_PHI)
		return 0;
	if (bb_has_side_effects(bb))
		return 0;
	return try_to_simplify_bb(bb, def, br);
}
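
/*
 * Branch-to-branch simplification. If the branch target ends in a
 * conditional branch on the same condition, the outcome of that
 * second branch is already known on this edge:
 *
 *	if (x) goto A; else ...
 *  A:	if (x) goto B; else goto C;
 *
 * Here the true edge of the first branch can be redirected straight
 * to B, provided A has no side effects and B does not depend on
 * pseudos defined in A.
 */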
static int simplify_branch_branch(struct basic_block *bb, struct instruction *br,
	struct basic_block **target_p, int true)
{
	struct basic_block *target = *target_p, *final;
	struct instruction *insn;
	int retval;

	if (target == bb)
		return 0;
	insn = last_instruction(target->insns);
	if (!insn || insn->opcode != OP_CBR || insn->cond != br->cond)
		return 0;
	/*
	 * Ahhah! We've found a branch to a branch on the same conditional!
	 * Now we just need to see if we can rewrite the branch..
	 */
	retval = 0;
	final = true ? insn->bb_true : insn->bb_false;
	if (bb_has_side_effects(target))
		goto try_to_rewrite_target;
	if (bb_depends_on(final, target))
		goto try_to_rewrite_target;
	return rewrite_branch(bb, target_p, target, final);

try_to_rewrite_target:
	/*
	 * If we're the only parent, at least we can rewrite the
	 * now-known second branch.
	 */
	if (bb_list_size(target->parents) != 1)
		return retval;
	insert_branch(target, insn, final);
	return 1;
}

static int simplify_one_branch(struct basic_block *bb, struct instruction *br)
{
	if (simplify_phi_branch(bb, br))
		return 1;
	return simplify_branch_branch(bb, br, &br->bb_true, 1) |
	       simplify_branch_branch(bb, br, &br->bb_false, 0);
}

static int simplify_branch_nodes(struct entrypoint *ep)
{
	int changed = 0;
	struct basic_block *bb;

	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *br = last_instruction(bb->insns);

		if (!br || br->opcode != OP_CBR)
			continue;
		changed |= simplify_one_branch(bb, br);
	} END_FOR_EACH_PTR(bb);
	return changed;
}

/*
 * This is called late - when we have intra-bb liveness information..
 */
int simplify_flow(struct entrypoint *ep)
{
	return simplify_branch_nodes(ep);
}

static inline void concat_user_list(struct pseudo_user_list *src, struct pseudo_user_list **dst)
{
	concat_ptr_list((struct ptr_list *)src, (struct ptr_list **)dst);
}
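
/*
 * Replace every use of the target pseudo of "insn" with "src", and
 * hand the old use list over to the replacement pseudo.
 */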
void convert_instruction_target(struct instruction *insn, pseudo_t src)
{
	pseudo_t target;
	struct pseudo_user *pu;
	/*
	 * Go through the "insn->users" list and replace them all..
	 */
	target = insn->target;
	if (target == src)
		return;
	FOR_EACH_PTR(target->users, pu) {
		if (*pu->userp != VOID) {
			assert(*pu->userp == target);
			*pu->userp = src;
		}
	} END_FOR_EACH_PTR(pu);
	if (has_use_list(src))
		concat_user_list(target->users, &src->users);
	target->users = NULL;
}

void convert_load_instruction(struct instruction *insn, pseudo_t src)
{
	convert_instruction_target(insn, src);
	/* Turn the load into a no-op */
	insn->opcode = OP_LNOP;
	insn->bb = NULL;
}
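
/*
 * Check whether the bit ranges touched by two memory operations
 * overlap; byte offsets are converted to bits for the comparison.
 */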
static int overlapping_memop(struct instruction *a, struct instruction *b)
{
	unsigned int a_start = bytes_to_bits(a->offset);
	unsigned int b_start = bytes_to_bits(b->offset);
	unsigned int a_size = a->size;
	unsigned int b_size = b->size;

	if (a_size + a_start <= b_start)
		return 0;
	if (b_size + b_start <= a_start)
		return 0;
	return 1;
}

static inline int same_memop(struct instruction *a, struct instruction *b)
{
	return a->offset == b->offset && a->size == b->size;
}

static inline int distinct_symbols(pseudo_t a, pseudo_t b)
{
	if (a->type != PSEUDO_SYM)
		return 0;
	if (b->type != PSEUDO_SYM)
		return 0;
	return a->sym != b->sym;
}

/*
 * Return 1 if "dom" dominates the access to "pseudo"
 * in "insn".
 *
 * Return 0 if it doesn't, and -1 if you don't know.
 */
int dominates(pseudo_t pseudo, struct instruction *insn, struct instruction *dom, int local)
{
	int opcode = dom->opcode;

	if (opcode == OP_CALL || opcode == OP_ENTRY)
		return local ? 0 : -1;
	if (opcode != OP_LOAD && opcode != OP_STORE)
		return 0;
	if (dom->src != pseudo) {
		if (local)
			return 0;
		/* We don't think two explicitly different symbols ever alias */
		if (distinct_symbols(insn->src, dom->src))
			return 0;
		/* We could try to do some alias analysis here */
		return -1;
	}
	if (!same_memop(insn, dom)) {
		if (dom->opcode == OP_LOAD)
			return 0;
		if (!overlapping_memop(insn, dom))
			return 0;
		return -1;
	}
	return 1;
}

static int phisrc_in_bb(struct pseudo_list *list, struct basic_block *bb)
{
	pseudo_t p;
	FOR_EACH_PTR(list, p) {
		if (p->def->bb == bb)
			return 1;
	} END_FOR_EACH_PTR(p);

	return 0;
}
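
/*
 * Walk backwards through each parent of "bb" looking for a memop that
 * dominates the load. Where one is found, a phi-source is inserted
 * just above the parent's terminating branch; where none is found,
 * the search recurses into that parent's parents.
 */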
static int find_dominating_parents(pseudo_t pseudo, struct instruction *insn,
	struct basic_block *bb, unsigned long generation, struct pseudo_list **dominators,
	int local)
{
	struct basic_block *parent;

	if (!bb->parents)
		return !!local;

	FOR_EACH_PTR(bb->parents, parent) {
		struct instruction *one;
		struct instruction *br;
		pseudo_t phi;

		FOR_EACH_PTR_REVERSE(parent->insns, one) {
			int dominance;
			if (one == insn)
				goto no_dominance;
			dominance = dominates(pseudo, insn, one, local);
			if (dominance < 0) {
				if (one->opcode == OP_LOAD)
					continue;
				return 0;
			}
			if (!dominance)
				continue;
			goto found_dominator;
		} END_FOR_EACH_PTR_REVERSE(one);
no_dominance:
		if (parent->generation == generation)
			continue;
		parent->generation = generation;

		if (!find_dominating_parents(pseudo, insn, parent, generation, dominators, local))
			return 0;
		continue;

found_dominator:
		if (dominators && phisrc_in_bb(*dominators, parent))
			continue;
		br = delete_last_instruction(&parent->insns);
		phi = alloc_phi(parent, one->target, one->size);
		phi->ident = phi->ident ? : pseudo->ident;
		add_instruction(&parent->insns, br);
		use_pseudo(insn, phi, add_pseudo(dominators, phi));
	} END_FOR_EACH_PTR(parent);
	return 1;
}

/*
 * We should probably sort the phi list just to make it easier to compare
 * later for equality.
 */
void rewrite_load_instruction(struct instruction *insn, struct pseudo_list *dominators)
{
	pseudo_t new, phi;

	/*
	 * Check for somewhat common case of duplicate
	 * phi nodes.
	 */
	new = first_pseudo(dominators)->def->src1;
	FOR_EACH_PTR(dominators, phi) {
		if (new != phi->def->src1)
			goto complex_phi;
		new->ident = new->ident ? : phi->ident;
	} END_FOR_EACH_PTR(phi);

	/*
	 * All the same pseudo - mark the phi-nodes unused
	 * and convert the load into a LNOP and replace the
	 * pseudo.
	 */
	FOR_EACH_PTR(dominators, phi) {
		kill_instruction(phi->def);
	} END_FOR_EACH_PTR(phi);
	convert_load_instruction(insn, new);
	return;

complex_phi:
	/* We leave symbol pseudos with a bogus usage list here */
	if (insn->src->type != PSEUDO_SYM)
		kill_use(&insn->src);
	insn->opcode = OP_PHI;
	insn->phi_list = dominators;
}
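
/*
 * Find the store(s) that dominate this load: first within the load's
 * own basic block, and failing that by recursing through the parents.
 * Returns 1 if the load could be rewritten.
 */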
static int find_dominating_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, int local)
{
	struct basic_block *bb = insn->bb;
	struct instruction *one, *dom = NULL;
	struct pseudo_list *dominators;
	int partial;

	/* Unreachable load? Undo it */
	if (!bb) {
		insn->opcode = OP_LNOP;
		return 1;
	}

	partial = 0;
	FOR_EACH_PTR(bb->insns, one) {
		int dominance;
		if (one == insn)
			goto found;
		dominance = dominates(pseudo, insn, one, local);
		if (dominance < 0) {
			/* Ignore partial load dominators */
			if (one->opcode == OP_LOAD)
				continue;
			dom = NULL;
			partial = 1;
			continue;
		}
		if (!dominance)
			continue;
		dom = one;
		partial = 0;
	} END_FOR_EACH_PTR(one);
	/* Whaa? */
	warning(pseudo->sym->pos, "unable to find symbol read");
	return 0;
found:
	if (partial)
		return 0;

	if (dom) {
		convert_load_instruction(insn, dom->target);
		return 1;
	}

	/* OK, go find the parents */
	bb->generation = generation;

	dominators = NULL;
	if (!find_dominating_parents(pseudo, insn, bb, generation, &dominators, local))
		return 0;

	/* This happens with initial assignments to structures etc.. */
	if (!dominators) {
		if (!local)
			return 0;
		check_access(insn);
		convert_load_instruction(insn, value_pseudo(0));
		return 1;
	}

	/*
	 * If we find just one dominating instruction, we
	 * can turn it into a direct thing. Otherwise we'll
	 * have to turn the load into a phi-node of the
	 * dominators.
	 */
	rewrite_load_instruction(insn, dominators);
	return 1;
}
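
/* Turn a store into a no-op and drop the use of its target pseudo */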
static void kill_store(struct instruction *insn)
{
	if (insn) {
		insn->bb = NULL;
		insn->opcode = OP_SNOP;
		kill_use(&insn->target);
	}
}

/* Kill a pseudo that is dead on exit from the bb */
static void kill_dead_stores(pseudo_t pseudo, unsigned long generation, struct basic_block *bb, int local)
{
	struct instruction *insn;
	struct basic_block *parent;

	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR_REVERSE(bb->insns, insn) {
		int opcode = insn->opcode;

		if (opcode != OP_LOAD && opcode != OP_STORE) {
			if (local)
				continue;
			if (opcode == OP_CALL)
				return;
			continue;
		}
		if (insn->src == pseudo) {
			if (opcode == OP_LOAD)
				return;
			kill_store(insn);
			continue;
		}
		if (local)
			continue;
		if (insn->src->type != PSEUDO_SYM)
			return;
	} END_FOR_EACH_PTR_REVERSE(insn);

	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dead_stores(pseudo, generation, parent, local);
	} END_FOR_EACH_PTR(parent);
}

/*
 * This should see if the "insn" trivially dominates some previous
 * store, and kill the store if unnecessary.
 */
static void kill_dominated_stores(pseudo_t pseudo, struct instruction *insn,
	unsigned long generation, struct basic_block *bb, int local, int found)
{
	struct instruction *one;
	struct basic_block *parent;

	/* Unreachable store? Undo it */
	if (!bb) {
		kill_store(insn);
		return;
	}
	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR_REVERSE(bb->insns, one) {
		int dominance;
		if (!found) {
			if (one != insn)
				continue;
			found = 1;
			continue;
		}
		dominance = dominates(pseudo, insn, one, local);
		if (!dominance)
			continue;
		if (dominance < 0)
			return;
		if (one->opcode == OP_LOAD)
			return;
		kill_store(one);
	} END_FOR_EACH_PTR_REVERSE(one);

	if (!found) {
		warning(bb->pos, "Unable to find instruction");
		return;
	}

	FOR_EACH_PTR(bb->parents, parent) {
		struct basic_block *child;
		FOR_EACH_PTR(parent->children, child) {
			if (child && child != bb)
				return;
		} END_FOR_EACH_PTR(child);
		kill_dominated_stores(pseudo, insn, generation, parent, local, found);
	} END_FOR_EACH_PTR(parent);
}

void check_access(struct instruction *insn)
{
	pseudo_t pseudo = insn->src;

	if (insn->bb && pseudo->type == PSEUDO_SYM) {
		int offset = insn->offset, bit = bytes_to_bits(offset) + insn->size;
		struct symbol *sym = pseudo->sym;

		if (sym->bit_size > 0 && (offset < 0 || bit > sym->bit_size))
			warning(insn->pos, "invalid access %s '%s' (%d %d)",
				offset < 0 ? "below" : "past the end of",
				show_ident(sym->ident), offset,
				bits_to_bytes(sym->bit_size));
	}
}
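
/*
 * Try to turn all accesses to a symbol into direct pseudo usage.
 * The easy case is a local symbol with at most one store; volatiles
 * and symbols with external visibility or their address taken need
 * the full dominating-store analysis instead.
 */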
static void simplify_one_symbol(struct entrypoint *ep, struct symbol *sym)
{
	pseudo_t pseudo, src;
	struct pseudo_user *pu;
	struct instruction *def;
	unsigned long mod;
	int all, stores, complex;

	/* Never used as a symbol? */
	pseudo = sym->pseudo;
	if (!pseudo)
		return;

	/* We don't do coverage analysis of volatiles.. */
	if (sym->ctype.modifiers & MOD_VOLATILE)
		return;

	/* ..and symbols with external visibility need more care */
	mod = sym->ctype.modifiers & (MOD_NONLOCAL | MOD_STATIC | MOD_ADDRESSABLE);
	if (mod)
		goto external_visibility;

	def = NULL;
	stores = 0;
	complex = 0;
	FOR_EACH_PTR(pseudo->users, pu) {
		/* We know that the symbol-pseudo use is the "src" in the instruction */
		struct instruction *insn = pu->insn;

		switch (insn->opcode) {
		case OP_STORE:
			stores++;
			def = insn;
			break;
		case OP_LOAD:
			break;
		case OP_SYMADDR:
			if (!insn->bb)
				continue;
			mod |= MOD_ADDRESSABLE;
			goto external_visibility;
		case OP_NOP:
		case OP_SNOP:
		case OP_LNOP:
		case OP_PHI:
			continue;
		default:
			warning(sym->pos, "symbol '%s' pseudo used in unexpected way", show_ident(sym->ident));
		}
		complex |= insn->offset;
	} END_FOR_EACH_PTR(pu);

	if (complex)
		goto complex_def;
	if (stores > 1)
		goto multi_def;

	/*
	 * Goodie, we have a single store (if even that) in the whole
	 * thing. Replace all loads with moves from the pseudo,
	 * replace the store with a def.
	 */
	src = VOID;
	if (def)
		src = def->target;

	FOR_EACH_PTR(pseudo->users, pu) {
		struct instruction *insn = pu->insn;
		if (insn->opcode == OP_LOAD) {
			check_access(insn);
			convert_load_instruction(insn, src);
		}
	} END_FOR_EACH_PTR(pu);

	/* Turn the store into a no-op */
	kill_store(def);
	return;

multi_def:
complex_def:
external_visibility:
	all = 1;
	FOR_EACH_PTR_REVERSE(pseudo->users, pu) {
		struct instruction *insn = pu->insn;
		if (insn->opcode == OP_LOAD)
			all &= find_dominating_stores(pseudo, insn, ++bb_generation, !mod);
	} END_FOR_EACH_PTR_REVERSE(pu);

	/* If we converted all the loads, remove the stores. They are dead */
	if (all && !mod) {
		FOR_EACH_PTR(pseudo->users, pu) {
			struct instruction *insn = pu->insn;
			if (insn->opcode == OP_STORE)
				kill_store(insn);
		} END_FOR_EACH_PTR(pu);
	} else {
		/*
		 * If we couldn't take the shortcut, see if we can at least kill some
		 * of them..
		 */
		FOR_EACH_PTR(pseudo->users, pu) {
			struct instruction *insn = pu->insn;
			if (insn->opcode == OP_STORE)
				kill_dominated_stores(pseudo, insn, ++bb_generation, insn->bb, !mod, 0);
		} END_FOR_EACH_PTR(pu);

		if (!(mod & (MOD_NONLOCAL | MOD_STATIC))) {
			struct basic_block *bb;
			FOR_EACH_PTR(ep->bbs, bb) {
				if (!bb->children)
					kill_dead_stores(pseudo, ++bb_generation, bb, !mod);
			} END_FOR_EACH_PTR(bb);
		}
	}

	return;
}

void simplify_symbol_usage(struct entrypoint *ep)
{
	pseudo_t pseudo;

	FOR_EACH_PTR(ep->accesses, pseudo) {
		simplify_one_symbol(ep, pseudo->sym);
	} END_FOR_EACH_PTR(pseudo);
}

static void mark_bb_reachable(struct basic_block *bb, unsigned long generation)
{
	struct basic_block *child;

	if (bb->generation == generation)
		return;
	bb->generation = generation;
	FOR_EACH_PTR(bb->children, child) {
		mark_bb_reachable(child, generation);
	} END_FOR_EACH_PTR(child);
}
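
/*
 * If the instruction defines a pseudo that is still in use somewhere,
 * redirect all those uses to VOID before the definition goes away.
 */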
static void kill_defs(struct instruction *insn)
{
	pseudo_t target = insn->target;

	if (!has_use_list(target))
		return;
	if (target->def != insn)
		return;

	convert_instruction_target(insn, VOID);
}

void kill_bb(struct basic_block *bb)
{
	struct instruction *insn;
	struct basic_block *child, *parent;

	FOR_EACH_PTR(bb->insns, insn) {
		kill_instruction_force(insn);
		kill_defs(insn);
		/*
		 * We kill unreachable instructions even if they
		 * otherwise aren't "killable" (e.g. volatile loads)
		 */
	} END_FOR_EACH_PTR(insn);
	bb->insns = NULL;

	FOR_EACH_PTR(bb->children, child) {
		remove_bb_from_list(&child->parents, bb, 0);
	} END_FOR_EACH_PTR(child);
	bb->children = NULL;

	FOR_EACH_PTR(bb->parents, parent) {
		remove_bb_from_list(&parent->children, bb, 0);
	} END_FOR_EACH_PTR(parent);
	bb->parents = NULL;
}

void kill_unreachable_bbs(struct entrypoint *ep)
{
	struct basic_block *bb;
	unsigned long generation = ++bb_generation;

	mark_bb_reachable(ep->entry->bb, generation);
	FOR_EACH_PTR(ep->bbs, bb) {
		if (bb->generation == generation)
			continue;
		/* Mark it as being dead */
		kill_bb(bb);
		bb->ep = NULL;
		DELETE_CURRENT_PTR(bb);
	} END_FOR_EACH_PTR(bb);
	PACK_PTR_LIST(&ep->bbs);

	repeat_phase &= ~REPEAT_CFG_CLEANUP;
}
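
/*
 * Redirect whatever terminator "bb" has (conditional or unconditional
 * branch, or switch) from the "old" target block to "new".
 */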
static int rewrite_parent_branch(struct basic_block *bb, struct basic_block *old, struct basic_block *new)
{
	int changed = 0;
	struct instruction *insn = last_instruction(bb->insns);

	if (!insn)
		return 0;

	/* Infinite loops: let's not "optimize" them.. */
	if (old == new)
		return 0;

	switch (insn->opcode) {
	case OP_CBR:
		changed |= rewrite_branch(bb, &insn->bb_false, old, new);
		/* fall through */
	case OP_BR:
		changed |= rewrite_branch(bb, &insn->bb_true, old, new);
		assert(changed);
		return changed;
	case OP_SWITCH: {
		struct multijmp *jmp;
		FOR_EACH_PTR(insn->multijmp_list, jmp) {
			changed |= rewrite_branch(bb, &jmp->target, old, new);
		} END_FOR_EACH_PTR(jmp);
		assert(changed);
		return changed;
	}
	default:
		return 0;
	}
}
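
/*
 * If "bb" consists of nothing but a branch (possibly a conditional
 * branch on a known-constant condition), make every parent jump to
 * the real destination directly. Returns the destination if all
 * parents could be rewritten, NULL otherwise.
 */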
static struct basic_block * rewrite_branch_bb(struct basic_block *bb, struct instruction *br)
{
	struct basic_block *parent;
	struct basic_block *target = br->bb_true;
	struct basic_block *false = br->bb_false;

	if (br->opcode == OP_CBR) {
		pseudo_t cond = br->cond;
		if (cond->type != PSEUDO_VAL)
			return NULL;
		target = cond->value ? target : false;
	}

	/*
	 * We can't do FOR_EACH_PTR() here, because the parent list
	 * may change when we rewrite the parent.
	 */
	while ((parent = first_basic_block(bb->parents)) != NULL) {
		if (!rewrite_parent_branch(parent, bb, target))
			return NULL;
	}
	return target;
}
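
/* CFG verification: assert that parent and child lists stay in sync */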
static void vrfy_bb_in_list(struct basic_block *bb, struct basic_block_list *list)
{
	if (bb) {
		struct basic_block *tmp;
		int no_bb_in_list = 0;

		FOR_EACH_PTR(list, tmp) {
			if (bb == tmp)
				return;
		} END_FOR_EACH_PTR(tmp);
		assert(no_bb_in_list);
	}
}

static void vrfy_parents(struct basic_block *bb)
{
	struct basic_block *tmp;
	FOR_EACH_PTR(bb->parents, tmp) {
		vrfy_bb_in_list(bb, tmp->children);
	} END_FOR_EACH_PTR(tmp);
}

static void vrfy_children(struct basic_block *bb)
{
	struct basic_block *tmp;
	struct instruction *br = last_instruction(bb->insns);

	if (!br) {
		assert(!bb->children);
		return;
	}
	switch (br->opcode) {
		struct multijmp *jmp;
	case OP_CBR:
		vrfy_bb_in_list(br->bb_false, bb->children);
		/* fall through */
	case OP_BR:
		vrfy_bb_in_list(br->bb_true, bb->children);
		break;
	case OP_SWITCH:
	case OP_COMPUTEDGOTO:
		FOR_EACH_PTR(br->multijmp_list, jmp) {
			vrfy_bb_in_list(jmp->target, bb->children);
		} END_FOR_EACH_PTR(jmp);
		break;
	default:
		break;
	}

	FOR_EACH_PTR(bb->children, tmp) {
		vrfy_bb_in_list(bb, tmp->parents);
	} END_FOR_EACH_PTR(tmp);
}

static void vrfy_bb_flow(struct basic_block *bb)
{
	vrfy_children(bb);
	vrfy_parents(bb);
}

void vrfy_flow(struct entrypoint *ep)
{
	struct basic_block *bb;
	struct basic_block *entry = ep->entry->bb;

	FOR_EACH_PTR(ep->bbs, bb) {
		if (bb == entry)
			entry = NULL;
		vrfy_bb_flow(bb);
	} END_FOR_EACH_PTR(bb);
	assert(!entry);
}
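
/*
 * Clean up the CFG: get rid of basic blocks that are nothing but a
 * branch, and merge a block into its parent when it is the only
 * child and the parent is its only parent.
 */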
void pack_basic_blocks(struct entrypoint *ep)
{
	struct basic_block *bb;

	/* See if we can merge a bb into another one.. */
	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *first, *insn;
		struct basic_block *parent, *child, *last;

		if (!bb_reachable(bb))
			continue;

		/*
		 * Just a branch?
		 */
		FOR_EACH_PTR(bb->insns, first) {
			if (!first->bb)
				continue;
			switch (first->opcode) {
			case OP_NOP: case OP_LNOP: case OP_SNOP:
				continue;
			case OP_CBR:
			case OP_BR: {
				struct basic_block *replace;
				replace = rewrite_branch_bb(bb, first);
				if (replace) {
					kill_bb(bb);
					goto no_merge;
				}
			}
			/* fallthrough */
			default:
				goto out;
			}
		} END_FOR_EACH_PTR(first);

out:
		/*
		 * See if we only have one parent..
		 */
		last = NULL;
		FOR_EACH_PTR(bb->parents, parent) {
			if (last) {
				if (last != parent)
					goto no_merge;
				continue;
			}
			last = parent;
		} END_FOR_EACH_PTR(parent);

		parent = last;
		if (!parent || parent == bb)
			continue;

		/*
		 * Goodie. See if the parent can merge..
		 */
		FOR_EACH_PTR(parent->children, child) {
			if (child != bb)
				goto no_merge;
		} END_FOR_EACH_PTR(child);

		/*
		 * Merge the two.
		 */
		repeat_phase |= REPEAT_CSE;

		parent->children = bb->children;
		bb->children = NULL;
		bb->parents = NULL;

		FOR_EACH_PTR(parent->children, child) {
			replace_bb_in_list(&child->parents, bb, parent, 0);
		} END_FOR_EACH_PTR(child);

		kill_instruction(delete_last_instruction(&parent->insns));
		FOR_EACH_PTR(bb->insns, insn) {
			if (insn->bb) {
				assert(insn->bb == bb);
				insn->bb = parent;
			}
			add_instruction(&parent->insns, insn);
		} END_FOR_EACH_PTR(insn);
		bb->insns = NULL;

no_merge:
		/* nothing to do */;
	} END_FOR_EACH_PTR(bb);
}