2 * Example of how to write a compiler with sparse
10 #include "expression.h"
11 #include "linearize.h"
17 struct pseudo_list
*contains
;
26 /* Our "switch" generation is very very stupid. */
27 #define SWITCH_REG (1)
29 static void output_bb(struct basic_block
*bb
, unsigned long generation
);
32 * We only know about the caller-clobbered registers
35 static struct hardreg hardregs
[] = {
43 #define REGNO (sizeof(hardregs)/sizeof(struct hardreg))
47 unsigned long stack_offset
;
48 struct storage_hash_list
*inputs
;
49 struct storage_hash_list
*outputs
;
50 struct storage_hash_list
*internal
;
/*
 * Linear lookup of the storage_hash entry for "pseudo" in "list".
 * NOTE(review): this chunk elides interior lines (original 54, 58,
 * 60-61) — presumably the matched-entry return and the NULL fall-
 * through; confirm against the full file.
 */
53 static struct storage_hash
*find_storage_hash(pseudo_t pseudo
, struct storage_hash_list
*list
)
55 struct storage_hash
*entry
;
56 FOR_EACH_PTR(list
, entry
) {
57 if (entry
->pseudo
== pseudo
)
59 } END_FOR_EACH_PTR(entry
);
/*
 * Return the storage_hash entry for "pseudo" in *listp, allocating a
 * fresh entry (with fresh storage) and linking it into the list when
 * none exists yet.  NOTE(review): the "found it" early path (original
 * line 68) is elided in this chunk.
 */
63 static struct storage_hash
*find_or_create_hash(pseudo_t pseudo
, struct storage_hash_list
**listp
)
65 struct storage_hash
*entry
;
67 entry
= find_storage_hash(pseudo
, *listp
);
69 entry
= alloc_storage_hash(alloc_storage());
70 entry
->pseudo
= pseudo
;
71 add_ptr_list(listp
, entry
);
76 /* Eventually we should just build it up in memory */
77 static void output_line(struct bb_state
*state
, const char *fmt
, ...)
86 static void output_label(struct bb_state
*state
, const char *fmt
, ...)
88 static char buffer
[512];
92 vsnprintf(buffer
, sizeof(buffer
), fmt
, args
);
95 output_line(state
, "%s:\n", buffer
);
98 static void output_insn(struct bb_state
*state
, const char *fmt
, ...)
100 static char buffer
[512];
104 vsnprintf(buffer
, sizeof(buffer
), fmt
, args
);
107 output_line(state
, "\t%s\n", buffer
);
110 static void output_comment(struct bb_state
*state
, const char *fmt
, ...)
112 static char buffer
[512];
118 vsnprintf(buffer
, sizeof(buffer
), fmt
, args
);
121 output_line(state
, "\t# %s\n", buffer
);
/*
 * Render a storage location as an assembler operand string:
 * frame-relative "%d(FP)", stack-relative "%d(SP)", a hard register
 * name, or the generic show_storage() form.  Returns a pointer into a
 * static buffer, so the result is only valid until the next call and
 * the function is not reentrant.  NOTE(review): the switch case
 * labels are elided in this chunk.
 */
124 static const char *show_memop(struct storage
*storage
)
126 static char buffer
[1000];
130 switch (storage
->type
) {
132 sprintf(buffer
, "%d(FP)", storage
->offset
);
135 sprintf(buffer
, "%d(SP)", storage
->offset
);
138 return hardregs
[storage
->regno
].name
;
140 return show_storage(storage
);
/*
 * Give "storage" a spill slot on the local stack: mark it REG_STACK
 * at the current per-basic-block stack offset, then bump the offset
 * by 4 (one 32-bit word) for the next allocation.
 */
145 static void alloc_stack(struct bb_state
*state
, struct storage
*storage
)
147 storage
->type
= REG_STACK
;
148 storage
->offset
= state
->stack_offset
;
149 state
->stack_offset
+= 4;
153 * Can we re-generate the pseudo, so that we don't need to
154 * flush it to memory? We can regenerate:
155 * - immediates and symbol addresses
156 * - pseudos we got as input in non-registers
157 * - pseudos we've already saved off earlier..
159 static int can_regenerate(struct bb_state
*state
, pseudo_t pseudo
)
161 struct storage_hash
*in
;
163 switch (pseudo
->type
) {
169 in
= find_storage_hash(pseudo
, state
->inputs
);
170 if (in
&& in
->storage
->type
!= REG_REG
)
172 in
= find_storage_hash(pseudo
, state
->internal
);
179 static void flush_one_pseudo(struct bb_state
*state
, struct hardreg
*hardreg
, pseudo_t pseudo
)
181 struct storage_hash
*out
;
182 struct storage
*storage
;
184 if (can_regenerate(state
, pseudo
))
187 output_comment(state
, "flushing %s from %s", show_pseudo(pseudo
), hardreg
->name
);
188 out
= find_storage_hash(pseudo
, state
->internal
);
190 out
= find_storage_hash(pseudo
, state
->outputs
);
192 out
= find_or_create_hash(pseudo
, &state
->internal
);
194 storage
= out
->storage
;
195 switch (storage
->type
) {
198 * Aieee - the next user wants it in a register, but we
199 * need to flush it to memory in between. Which means that
200 * we need to allocate an internal one, dammit..
202 out
= find_or_create_hash(pseudo
, &state
->internal
);
203 storage
= out
->storage
;
206 alloc_stack(state
, storage
);
209 output_insn(state
, "movl %s,%s", hardreg
->name
, show_memop(storage
));
214 /* Flush a hardreg out to the storage it has.. */
/*
 * For every pseudo held in "hardreg": skip those tagged dead or not
 * dirty, write back the rest via flush_one_pseudo(), then drop the
 * whole contents list — the register is empty afterwards.
 * NOTE(review): the local pseudo declaration and the loop's
 * "continue" lines are elided in this chunk.
 */
215 static void flush_reg(struct bb_state
*state
, struct hardreg
*hardreg
)
224 FOR_EACH_PTR(hardreg
->contains
, pseudo
) {
225 if (CURRENT_TAG(pseudo
) & TAG_DEAD
)
227 if (!(CURRENT_TAG(pseudo
) & TAG_DIRTY
))
229 flush_one_pseudo(state
, hardreg
, pseudo
)
;
230 } END_FOR_EACH_PTR(pseudo
);
231 free_ptr_list(&hardreg
->contains
);
234 static struct storage_hash
*find_pseudo_storage(struct bb_state
*state
, pseudo_t pseudo
, struct hardreg
*reg
)
236 struct storage_hash
*src
;
238 src
= find_storage_hash(pseudo
, state
->internal
);
240 src
= find_storage_hash(pseudo
, state
->inputs
);
242 src
= find_storage_hash(pseudo
, state
->outputs
);
243 /* Undefined? Screw it! */
248 * If we found output storage, it had better be local stack
249 * that we flushed to earlier..
251 if (src
->storage
->type
!= REG_STACK
)
257 * Incoming pseudo without any pre-set storage allocation?
258 * We can make up our own, and obviously prefer to get it
259 * in the register we already selected (if it hasn't been
262 if (src
->storage
->type
== REG_UDEF
) {
263 if (reg
&& !reg
->used
) {
264 src
->storage
->type
= REG_REG
;
265 src
->storage
->regno
= reg
- hardregs
;
267 alloc_stack(state
, src
->storage
);
/*
 * Tag "pseudo" as TAG_DEAD inside reg->contains.  Entries already
 * tagged dead are skipped; a comment line is emitted so the generated
 * output shows the liveness decision.  NOTE(review): the "p == pseudo"
 * match test (original ~277-278) is elided in this chunk.
 */
272 static void mark_reg_dead(struct bb_state
*state
, pseudo_t pseudo
, struct hardreg
*reg
)
276 FOR_EACH_PTR(reg
->contains
, p
) {
279 if (CURRENT_TAG(p
) & TAG_DEAD
)
281 output_comment(state
, "marking pseudo %s in reg %s dead", show_pseudo(pseudo
), reg
->name
);
282 TAG_CURRENT(p
, TAG_DEAD
);
284 } END_FOR_EACH_PTR(p
);
287 /* Fill a hardreg with the pseudo it has */
288 static struct hardreg
*fill_reg(struct bb_state
*state
, struct hardreg
*hardreg
, pseudo_t pseudo
)
290 struct storage_hash
*src
;
291 struct instruction
*def
;
293 switch (pseudo
->type
) {
295 output_insn(state
, "movl $%lld,%s", pseudo
->value
, hardreg
->name
);
298 output_insn(state
, "movl $<%s>,%s", show_pseudo(pseudo
), hardreg
->name
);
303 if (def
->opcode
== OP_SETVAL
) {
304 output_insn(state
, "movl $<%s>,%s", show_pseudo(def
->symbol
), hardreg
->name
);
307 src
= find_pseudo_storage(state
, pseudo
, hardreg
);
310 if (src
->flags
& TAG_DEAD
)
311 mark_reg_dead(state
, pseudo
, hardreg
);
312 output_insn(state
, "mov.%d %s,%s", 32, show_memop(src
->storage
), hardreg
->name
);
315 output_insn(state
, "reload %s from %s", hardreg
->name
, show_pseudo(pseudo
));
/*
 * Record that hard register "reg" now holds "pseudo".  The pseudo is
 * added with TAG_DIRTY so a later flush_reg() will write it back to
 * its storage.  A comment line is emitted into the output stream.
 *
 * BUGFIX: "&reg" had been mangled into the single character '®'
 * (mojibake of the HTML entity "&reg;"), which cannot compile.
 */
321 static void add_pseudo_reg(struct bb_state
*state
, pseudo_t pseudo
, struct hardreg
*reg
)
323 output_comment(state
, "added pseudo %s to reg %s", show_pseudo(pseudo
), reg
->name
);
324 add_ptr_list_tag(&reg
->contains
, pseudo
, TAG_DIRTY
);
/*
 * If the output storage already assigned to "target" is a hard
 * register, return that hardreg so the value can be computed directly
 * into its final home.  NOTE(review): the NULL-returning paths (no
 * output entry / not REG_REG, original ~335, 339-341) are elided in
 * this chunk.
 */
330 static struct hardreg
*preferred_reg(struct bb_state
*state
, pseudo_t target
)
332 struct storage_hash
*dst
;
334 dst
= find_storage_hash(target
, state
->outputs
);
336 struct storage
*storage
= dst
->storage
;
337 if (storage
->type
== REG_REG
)
338 return hardregs
+ storage
->regno
;
343 static struct hardreg
*empty_reg(struct bb_state
*state
)
346 struct hardreg
*reg
= hardregs
;
348 for (i
= 0; i
< REGNO
; i
++, reg
++) {
355 static struct hardreg
*target_reg(struct bb_state
*state
, pseudo_t pseudo
, pseudo_t target
)
360 /* First, see if we have a preferred target register.. */
361 reg
= preferred_reg(state
, target
);
362 if (reg
&& !reg
->busy
)
365 reg
= empty_reg(state
);
374 flush_reg(state
, reg
);
377 add_pseudo_reg(state
, pseudo
, reg
);
381 static struct hardreg
*find_in_reg(struct bb_state
*state
, pseudo_t pseudo
)
386 for (i
= 0; i
< REGNO
; i
++) {
390 FOR_EACH_PTR(reg
->contains
, p
) {
393 output_comment(state
, "found pseudo %s in reg %s", show_pseudo(pseudo
), reg
->name
);
396 } END_FOR_EACH_PTR(p
);
/*
 * Get "pseudo" into some hard register and return it: reuse the
 * register that already holds it if there is one, otherwise pick a
 * target register (preferring "target"'s eventual home) and fill it
 * from storage.  NOTE(review): the declaration of "reg" and the
 * early return on a find_in_reg() hit are elided in this chunk.
 */
401 static struct hardreg
*getreg(struct bb_state
*state
, pseudo_t pseudo
, pseudo_t target
)
405 reg
= find_in_reg(state
, pseudo
);
408 reg
= target_reg(state
, pseudo
, target
);
409 return fill_reg(state
, reg
, pseudo
);
412 static struct hardreg
*copy_reg(struct bb_state
*state
, struct hardreg
*src
, pseudo_t target
)
420 reg
= preferred_reg(state
, target
);
421 if (reg
&& !reg
->busy
) {
422 output_comment(state
, "copying %s to preferred target %s", show_pseudo(target
), reg
->name
);
423 output_insn(state
, "movl %s,%s", src
->name
, reg
->name
);
427 for (i
= 0; i
< REGNO
; i
++) {
428 struct hardreg
*reg
= hardregs
+ i
;
430 output_comment(state
, "copying %s to %s", show_pseudo(target
), reg
->name
);
431 output_insn(state
, "movl %s,%s", src
->name
, reg
->name
);
436 flush_reg(state
, src
);
440 static const char *generic(struct bb_state
*state
, pseudo_t pseudo
)
443 struct storage_hash
*src
;
445 switch (pseudo
->type
) {
448 return show_pseudo(pseudo
);
450 reg
= find_in_reg(state
, pseudo
);
453 src
= find_pseudo_storage(state
, pseudo
, NULL
);
456 return show_memop(src
->storage
);
460 static const char *address(struct bb_state
*state
, struct instruction
*memop
)
463 struct hardreg
*base
;
464 static char buffer
[100];
465 pseudo_t addr
= memop
->src
;
470 if (sym
->ctype
.modifiers
& MOD_NONLOCAL
) {
471 sprintf(buffer
, "%s+%d", show_ident(sym
->ident
), memop
->offset
);
474 sprintf(buffer
, "%d+%s(SP)", memop
->offset
, show_ident(sym
->ident
));
477 base
= getreg(state
, addr
, NULL
);
478 sprintf(buffer
, "%d(%s)", memop
->offset
, base
->name
);
483 static const char *reg_or_imm(struct bb_state
*state
, pseudo_t pseudo
)
485 switch(pseudo
->type
) {
487 return show_pseudo(pseudo
);
489 return getreg(state
, pseudo
, NULL
)->name
;
493 static const char* opcodes
[] = {
494 [OP_BADOP
] = "bad_op",
497 [OP_ENTRY
] = "<entry-point>",
502 [OP_SWITCH
] = "switch",
503 [OP_INVOKE
] = "invoke",
504 [OP_COMPUTEDGOTO
] = "jmp *",
505 [OP_UNWIND
] = "unwind",
520 [OP_AND_BOOL
] = "and-bool",
521 [OP_OR_BOOL
] = "or-bool",
523 /* Binary comparison */
524 [OP_SET_EQ
] = "seteq",
525 [OP_SET_NE
] = "setne",
526 [OP_SET_LE
] = "setle",
527 [OP_SET_GE
] = "setge",
528 [OP_SET_LT
] = "setlt",
529 [OP_SET_GT
] = "setgt",
532 [OP_SET_BE
] = "setbe",
533 [OP_SET_AE
] = "setae",
539 /* Special three-input */
543 [OP_MALLOC
] = "malloc",
545 [OP_ALLOCA
] = "alloca",
547 [OP_STORE
] = "store",
549 [OP_GET_ELEMENT_PTR
] = "getelem",
553 [OP_PHISOURCE
] = "phisrc",
555 [OP_PTRCAST
] = "ptrcast",
557 [OP_VANEXT
] = "va_next",
558 [OP_VAARG
] = "va_arg",
559 [OP_SLICE
] = "slice",
563 [OP_DEATHNOTE
] = "dead",
566 /* Sparse tagging (line numbers, context, whatever) */
567 [OP_CONTEXT
] = "context",
/*
 * Remove every pseudo tagged TAG_DEAD from reg->contains, then pack
 * the ptrlist to reclaim the holes left by deletion.
 *
 * BUGFIX: "&reg" had been mangled into the single character '®'
 * (mojibake of the HTML entity "&reg;"), which cannot compile.
 */
570 static void kill_dead_reg(struct hardreg
*reg
)
575 FOR_EACH_PTR(reg
->contains
, p
) {
576 if (CURRENT_TAG(p
) & TAG_DEAD
) {
577 DELETE_CURRENT_PTR(p
);
581 } END_FOR_EACH_PTR(p
);
582 PACK_PTR_LIST(&reg
->contains
);
587 static struct hardreg
*target_copy_reg(struct bb_state
*state
, struct hardreg
*src
, pseudo_t target
)
590 return copy_reg(state
, src
, target
);
593 static void do_binop(struct bb_state
*state
, struct instruction
*insn
, pseudo_t val1
, pseudo_t val2
)
595 const char *op
= opcodes
[insn
->opcode
];
596 struct hardreg
*src
= getreg(state
, val1
, insn
->target
);
597 const char *src2
= generic(state
, val2
);
600 dst
= target_copy_reg(state
, src
, insn
->target
);
601 output_insn(state
, "%s.%d %s,%s", op
, insn
->size
, src2
, dst
->name
);
602 add_pseudo_reg(state
, insn
->target
, dst
);
/*
 * Emit a non-commutative binary operation: operand order is fixed, so
 * src1 must be the register operand and src2 the generic one — see
 * generate_commutative_binop() for the flexible variant.
 */
605 static void generate_binop(struct bb_state
*state
, struct instruction
*insn
)
607 do_binop(state
, insn
, insn
->src1
, insn
->src2
);
/*
 * Return nonzero if "pseudo" is held in "reg" and tagged TAG_DEAD,
 * i.e. the register copy can be clobbered without a writeback.
 * NOTE(review): the "p == pseudo" match test and the final
 * "return 0" are elided in this chunk.
 */
610 static int is_dead_reg(struct bb_state
*state
, pseudo_t pseudo
, struct hardreg
*reg
)
613 FOR_EACH_PTR(reg
->contains
, p
) {
615 return CURRENT_TAG(p
) & TAG_DEAD
;
616 } END_FOR_EACH_PTR(p
);
621 * Commutative binops are much more flexible, since we can switch the
622 * sources around to satisfy the target register, or to avoid having
623 * to load one of them into a register..
625 static void generate_commutative_binop(struct bb_state
*state
, struct instruction
*insn
)
627 pseudo_t src1
= insn
->src1
, src2
= insn
->src2
;
628 struct hardreg
*reg1
, *reg2
= find_in_reg(state
, src2
);
632 reg1
= find_in_reg(state
, src1
);
635 if (!is_dead_reg(state
, src2
, reg2
))
637 if (!is_dead_reg(state
, src1
, reg1
))
640 /* Both are dead. Is one preferable? */
641 if (reg2
!= preferred_reg(state
, insn
->target
))
648 do_binop(state
, insn
, src1
, src2
);
652 * This marks a pseudo dead. It still stays on the hardreg list (the hardreg
653 * still has its value), but it's scheduled to be killed after the next
654 * "sequence point" when we call "kill_dead_pseudos()"
656 static void mark_pseudo_dead(struct bb_state
*state
, pseudo_t pseudo
)
659 struct storage_hash
*src
;
661 src
= find_pseudo_storage(state
, pseudo
, NULL
);
663 src
->flags
|= TAG_DEAD
;
664 for (i
= 0; i
< REGNO
; i
++)
665 mark_reg_dead(state
, pseudo
, hardregs
+ i
);
668 static void kill_dead_pseudos(struct bb_state
*state
)
672 for (i
= 0; i
< REGNO
; i
++) {
673 kill_dead_reg(hardregs
+ i
);
678 * A PHI source can define a pseudo that we already
679 * have in another register. We need to invalidate the
680 * old register so that we don't end up with the same
681 * pseudo in "two places".
/*
 * A PHI source can redefine a pseudo that is already cached in some
 * register; scan every hard register and delete stale copies of
 * "pseudo" so the same value never lives in two places.
 *
 * BUGFIX: "&reg" had been mangled into the single character '®'
 * (mojibake of the HTML entity "&reg;"), which cannot compile.
 * NOTE(review): the "p == pseudo" match test and local declarations
 * are elided in this chunk.
 */
683 static void remove_pseudo_reg(struct bb_state
*state
, pseudo_t pseudo
)
687 output_comment(state
, "pseudo %s died", show_pseudo(pseudo
));
688 for (i
= 0; i
< REGNO
; i
++) {
689 struct hardreg
*reg
= hardregs
+ i
;
691 FOR_EACH_PTR(reg
->contains
, p
) {
694 if (CURRENT_TAG(p
) & TAG_DEAD
)
697 DELETE_CURRENT_PTR(p
);
698 output_comment(state
, "removed pseudo %s from reg %s", show_pseudo(pseudo
), reg
->name
);
699 } END_FOR_EACH_PTR(p
);
700 PACK_PTR_LIST(&reg
->contains
);
704 static void generate_store(struct instruction
*insn
, struct bb_state
*state
)
706 output_insn(state
, "mov.%d %s,%s", insn
->size
, reg_or_imm(state
, insn
->target
), address(state
, insn
));
/*
 * Emit a load: compute the address string first (address() may itself
 * allocate registers), retire dead pseudos, pick a destination
 * register for the target, and emit the sized move.  NOTE(review):
 * the declaration of "dst" and the add_pseudo_reg() of the loaded
 * value are elided in this chunk.
 */
709 static void generate_load(struct instruction
*insn
, struct bb_state
*state
)
711 const char *input
= address(state
, insn
);
714 kill_dead_pseudos(state
);
715 dst
= target_reg(state
, insn
->target
, NULL
);
716 output_insn(state
, "mov.%d %s,%s", insn
->size
, input
, dst
->name
);
719 static void generate_phisource(struct instruction
*insn
, struct bb_state
*state
)
721 struct instruction
*user
;
724 /* Mark all the target pseudos dead first */
725 FOR_EACH_PTR(insn
->phi_users
, user
) {
726 mark_pseudo_dead(state
, user
->target
);
727 } END_FOR_EACH_PTR(user
);
730 FOR_EACH_PTR(insn
->phi_users
, user
) {
732 reg
= getreg(state
, insn
->phi_src
, user
->target
);
733 remove_pseudo_reg(state
, user
->target
);
734 add_pseudo_reg(state
, user
->target
, reg
);
735 } END_FOR_EACH_PTR(user
);
738 static void generate_cast(struct bb_state
*state
, struct instruction
*insn
)
740 struct hardreg
*src
= getreg(state
, insn
->src
, insn
->target
);
742 unsigned int old
= insn
->orig_type
? insn
->orig_type
->bit_size
: 0;
743 unsigned int new = insn
->size
;
746 * Cast to smaller type? Ignore the high bits, we
747 * just keep both pseudos in the same register.
750 add_pseudo_reg(state
, insn
->target
, src
);
754 dst
= target_copy_reg(state
, src
, insn
->target
);
756 if (insn
->orig_type
&& (insn
->orig_type
->ctype
.modifiers
& MOD_SIGNED
)) {
757 output_insn(state
, "sext.%d.%d %s", old
, new, dst
->name
);
759 unsigned long long mask
;
760 mask
= ~(~0ULL << old
);
761 mask
&= ~(~0ULL << new);
762 output_insn(state
, "andl.%d $%#llx,%s", insn
->size
, mask
, dst
->name
);
764 add_pseudo_reg(state
, insn
->target
, dst
);
767 static void generate_output_storage(struct bb_state
*state
);
769 static void generate_branch(struct bb_state
*state
, struct instruction
*br
)
772 struct hardreg
*reg
= getreg(state
, br
->cond
, NULL
);
773 output_insn(state
, "testl %s,%s", reg
->name
, reg
->name
);
775 generate_output_storage(state
);
777 output_insn(state
, "je .L%p", br
->bb_false
);
778 output_insn(state
, "jmp .L%p", br
->bb_true
);
781 /* We've made sure that there is a dummy reg live for the output */
782 static void generate_switch(struct bb_state
*state
, struct instruction
*insn
)
784 struct hardreg
*reg
= hardregs
+ SWITCH_REG
;
786 generate_output_storage(state
);
787 output_insn(state
, "switch on %s", reg
->name
);
788 output_insn(state
, "unimplemented: %s", show_instruction(insn
));
/*
 * Emit a function return.  A non-void return value is moved into
 * hardregs[0] (the ABI return register for this toy backend), then a
 * plain "ret" is emitted.
 */
791 static void generate_ret(struct bb_state
*state
, struct instruction
*ret
)
793 if (ret
->src
&& ret
->src
!= VOID
) {
794 struct hardreg
*wants
= hardregs
+0;
795 struct hardreg
*reg
= getreg(state
, ret
->src
, NULL
);
797 output_insn(state
, "movl %s,%s", reg
->name
, wants
->name
);
799 output_insn(state
, "ret");
803 * Fake "call" linearization just as a taster..
805 static void generate_call(struct bb_state
*state
, struct instruction
*insn
)
810 FOR_EACH_PTR(insn
->arguments
, arg
) {
811 output_insn(state
, "pushl %s", generic(state
, arg
));
813 } END_FOR_EACH_PTR(arg
);
814 flush_reg(state
, hardregs
+0);
815 flush_reg(state
, hardregs
+1);
816 flush_reg(state
, hardregs
+2);
817 output_insn(state
, "call %s", show_pseudo(insn
->func
));
819 output_insn(state
, "addl $%d,%%esp", offset
);
820 add_pseudo_reg(state
, insn
->target
, hardregs
+0);
823 static void generate_select(struct bb_state
*state
, struct instruction
*insn
)
825 struct hardreg
*src1
, *src2
, *cond
, *dst
;
827 cond
= getreg(state
, insn
->src1
, NULL
);
828 output_insn(state
, "testl %s,%s", cond
->name
, cond
->name
);
830 src1
= getreg(state
, insn
->src2
, NULL
);
831 dst
= copy_reg(state
, src1
, insn
->target
);
832 add_pseudo_reg(state
, insn
->target
, dst
);
833 src2
= getreg(state
, insn
->src3
, insn
->target
);
834 output_insn(state
, "sele %s,%s", src2
->name
, dst
->name
);
837 static void generate_asm_inputs(struct bb_state
*state
, struct asm_constraint_list
*list
)
839 struct asm_constraint
*entry
;
841 FOR_EACH_PTR(list
, entry
) {
842 const char *constraint
= entry
->constraint
;
843 pseudo_t pseudo
= entry
->pseudo
;
845 output_insn(state
, "# asm input \"%s\": %s", constraint
, show_pseudo(pseudo
));
846 } END_FOR_EACH_PTR(entry
);
849 static void generate_asm_outputs(struct bb_state
*state
, struct asm_constraint_list
*list
)
851 struct asm_constraint
*entry
;
853 FOR_EACH_PTR(list
, entry
) {
854 const char *constraint
= entry
->constraint
;
855 pseudo_t pseudo
= entry
->pseudo
;
857 while (*constraint
== '=' || *constraint
== '+')
860 output_insn(state
, "# asm output \"%s\": %s", constraint
, show_pseudo(pseudo
));
861 } END_FOR_EACH_PTR(entry
);
864 static void generate_asm(struct bb_state
*state
, struct instruction
*insn
)
866 generate_asm_inputs(state
, insn
->asm_rules
->inputs
);
867 output_insn(state
, "ASM: %s", insn
->string
);
868 generate_asm_outputs(state
, insn
->asm_rules
->outputs
);
871 static void generate_one_insn(struct instruction
*insn
, struct bb_state
*state
)
874 output_comment(state
, "%s", show_instruction(insn
));
876 switch (insn
->opcode
) {
878 struct symbol
*sym
= insn
->bb
->ep
->name
;
879 const char *name
= show_ident(sym
->ident
);
880 if (sym
->ctype
.modifiers
& MOD_STATIC
)
881 printf("\n\n%s:\n", name
);
883 printf("\n\n.globl %s\n%s:\n", name
, name
);
888 * OP_PHI doesn't actually generate any code. It has been
889 * done by the storage allocator and the OP_PHISOURCE.
895 generate_phisource(insn
, state
);
899 * OP_SETVAL likewise doesn't actually generate any
900 * code. On use, the "def" of the pseudo will be
907 generate_store(insn
, state
);
911 generate_load(insn
, state
);
915 mark_pseudo_dead(state
, insn
->target
);
918 case OP_ADD
: case OP_MUL
:
919 case OP_AND
: case OP_OR
: case OP_XOR
:
920 case OP_AND_BOOL
: case OP_OR_BOOL
:
921 case OP_SET_EQ
: case OP_SET_NE
:
922 generate_commutative_binop(state
, insn
);
925 case OP_SUB
: case OP_DIV
: case OP_MOD
:
926 case OP_SHL
: case OP_SHR
:
927 case OP_SET_LE
: case OP_SET_GE
:
928 case OP_SET_LT
: case OP_SET_GT
:
929 case OP_SET_B
: case OP_SET_A
:
930 case OP_SET_BE
: case OP_SET_AE
:
931 generate_binop(state
, insn
);
934 case OP_CAST
: case OP_PTRCAST
:
935 generate_cast(state
, insn
);
939 generate_select(state
, insn
);
943 generate_branch(state
, insn
);
947 generate_switch(state
, insn
);
951 generate_call(state
, insn
);
955 generate_ret(state
, insn
);
959 generate_asm(state
, insn
);
963 output_insn(state
, "unimplemented: %s", show_instruction(insn
));
966 kill_dead_pseudos(state
);
969 #define VERY_BUSY 1000
970 #define REG_FIXED 2000
972 static void write_reg_to_storage(struct bb_state
*state
, struct hardreg
*reg
, pseudo_t pseudo
, struct storage
*storage
)
977 switch (storage
->type
) {
979 out
= hardregs
+ storage
->regno
;
982 output_insn(state
, "movl %s,%s", reg
->name
, out
->name
);
985 if (reg
->busy
< VERY_BUSY
) {
986 storage
->type
= REG_REG
;
987 storage
->regno
= reg
- hardregs
;
988 reg
->busy
= REG_FIXED
;
992 /* Try to find a non-busy register.. */
993 for (i
= 0; i
< REGNO
; i
++) {
997 output_insn(state
, "movl %s,%s", reg
->name
, out
->name
);
998 storage
->type
= REG_REG
;
1000 reg
->busy
= REG_FIXED
;
1004 /* Fall back on stack allocation ... */
1005 alloc_stack(state
, storage
);
1008 output_insn(state
, "movl %s,%s", reg
->name
, show_memop(storage
));
1013 static void write_val_to_storage(struct bb_state
*state
, pseudo_t src
, struct storage
*storage
)
1015 struct hardreg
*out
;
1017 switch (storage
->type
) {
1019 alloc_stack(state
, storage
);
1021 output_insn(state
, "movl %s,%s", show_pseudo(src
), show_memop(storage
));
1024 out
= hardregs
+ storage
->regno
;
1025 output_insn(state
, "movl %s,%s", show_pseudo(src
), out
->name
);
/*
 * Move "pseudo" into its destination storage "out" at basic-block
 * exit: constants (and OP_SETVAL results) are materialized directly;
 * a register-resident copy is written out via write_reg_to_storage();
 * otherwise the value is transferred from whatever internal/input
 * storage it currently occupies.  NOTE(review): case labels, returns
 * and several declarations are elided in this chunk.
 */
1029 static void fill_output(struct bb_state
*state
, pseudo_t pseudo
, struct storage
*out
)
1032 struct storage_hash
*in
;
1033 struct instruction
*def
;
1035 /* Is that pseudo a constant value? */
1036 switch (pseudo
->type
) {
1038 write_val_to_storage(state
, pseudo
, out
);
1042 if (def
->opcode
== OP_SETVAL
) {
1043 write_val_to_storage(state
, def
->symbol
, out
);
1050 /* See if we have that pseudo in a register.. */
1051 for (i
= 0; i
< REGNO
; i
++) {
1052 struct hardreg
*reg
= hardregs
+ i
;
1055 FOR_EACH_PTR(reg
->contains
, p
) {
1057 write_reg_to_storage(state
, reg
, pseudo
, out
);
1060 } END_FOR_EACH_PTR(p
);
1063 /* Do we have it in another storage? */
1064 in
= find_storage_hash(pseudo
, state
->internal
);
1066 in
= find_storage_hash(pseudo
, state
->inputs
);
1071 switch (out
->type
) {
1073 *out
= *in
->storage
;
1076 output_insn(state
, "movl %s,%s", show_memop(in
->storage
), hardregs
[out
->regno
].name
);
1079 if (out
== in
->storage
)
/*
 * BUGFIX: the original chained comparison
 *   out->type == in->storage->type == out->regno == in->storage->regno
 * associates left-to-right, so it compared boolean results of earlier
 * comparisons instead of the fields themselves and could "match" two
 * different locations (skipping a needed move).  Use an explicit
 * conjunction of the two field equalities.
 */
1081 if (out
->type
== in
->storage
->type
&& out
->regno
== in
->storage
->regno
)
1083 output_insn(state
, "movl %s,%s", show_memop(in
->storage
), show_memop(out
));
1089 static int final_pseudo_flush(struct bb_state
*state
, pseudo_t pseudo
, struct hardreg
*reg
)
1091 struct storage_hash
*hash
;
1092 struct storage
*out
;
1093 struct hardreg
*dst
;
1096 * Since this pseudo is live at exit, we'd better have output
1099 hash
= find_storage_hash(pseudo
, state
->outputs
);
1102 out
= hash
->storage
;
1104 /* If the output is in a register, try to get it there.. */
1105 if (out
->type
== REG_REG
) {
1106 dst
= hardregs
+ out
->regno
;
1108 * Two good cases: nobody is using the right register,
1109 * or we've already set it aside for output..
1111 if (!dst
->busy
|| dst
->busy
> VERY_BUSY
)
1114 /* Aiee. Try to keep it in a register.. */
1115 dst
= empty_reg(state
);
1122 /* If the output is undefined, let's see if we can put it in a register.. */
1123 if (out
->type
== REG_UDEF
) {
1124 dst
= empty_reg(state
);
1126 out
->type
= REG_REG
;
1127 out
->regno
= dst
- hardregs
;
1130 /* Uhhuh. Not so good. No empty registers right now */
1134 /* If we know we need to flush it, just do so already .. */
1135 output_insn(state
, "movl %s,%s", reg
->name
, show_memop(out
));
1141 output_insn(state
, "movl %s,%s", reg
->name
, dst
->name
);
1142 add_pseudo_reg(state
, pseudo
, dst
);
1147 * This tries to make sure that we put all the pseudos that are
1148 * live on exit into the proper storage
/*
 * Put every pseudo that is live at basic-block exit into its assigned
 * output storage.  Pass 1 walks the REG_REG outputs: each such
 * register is pinned (busy = REG_FIXED), pseudos that already belong
 * there or are dead stay, everything else is written back via
 * final_pseudo_flush() or evicted with flush_reg().  Pass 2 fills all
 * outputs with fill_output().
 *
 * BUGFIX: "&reg" had been mangled into the single character '®'
 * (mojibake of the HTML entity "&reg;"), which cannot compile.
 * NOTE(review): local declarations and several "continue" lines are
 * elided in this chunk.
 */
1150 static void generate_output_storage(struct bb_state
*state
)
1152 struct storage_hash
*entry
;
1154 /* Go through the fixed outputs, making sure we have those regs free */
1155 FOR_EACH_PTR(state
->outputs
, entry
) {
1156 struct storage
*out
= entry
->storage
;
1157 if (out
->type
== REG_REG
) {
1158 struct hardreg
*reg
= hardregs
+ out
->regno
;
1162 reg
->busy
= REG_FIXED
;
1163 FOR_EACH_PTR(reg
->contains
, p
) {
1164 if (p
== entry
->pseudo
) {
1168 if (CURRENT_TAG(p
) & TAG_DEAD
)
1171 /* Try to write back the pseudo to where it should go ... */
1172 if (final_pseudo_flush(state
, p
, reg
)) {
1173 DELETE_CURRENT_PTR(p
);
1178 } END_FOR_EACH_PTR(p
);
1179 PACK_PTR_LIST(&reg
->contains
);
1181 flush_reg(state
, reg
);
1183 } END_FOR_EACH_PTR(entry
);
1185 FOR_EACH_PTR(state
->outputs
, entry
) {
1186 fill_output(state
, entry
->pseudo
, entry
->storage
);
1187 } END_FOR_EACH_PTR(entry
);
1190 static void generate(struct basic_block
*bb
, struct bb_state
*state
)
1193 struct storage_hash
*entry
;
1194 struct instruction
*insn
;
1196 for (i
= 0; i
< REGNO
; i
++) {
1197 free_ptr_list(&hardregs
[i
].contains
);
1198 hardregs
[i
].busy
= 0;
1199 hardregs
[i
].dead
= 0;
1200 hardregs
[i
].used
= 0;
1203 FOR_EACH_PTR(state
->inputs
, entry
) {
1204 struct storage
*storage
= entry
->storage
;
1205 const char *name
= show_storage(storage
);
1206 output_comment(state
, "incoming %s in %s", show_pseudo(entry
->pseudo
), name
);
1207 if (storage
->type
== REG_REG
) {
1208 int regno
= storage
->regno
;
1209 add_pseudo_reg(state
, entry
->pseudo
, hardregs
+ regno
);
1210 name
= hardregs
[regno
].name
;
1212 } END_FOR_EACH_PTR(entry
);
1214 output_label(state
, ".L%p", bb
);
1215 FOR_EACH_PTR(bb
->insns
, insn
) {
1218 generate_one_insn(insn
, state
);
1219 } END_FOR_EACH_PTR(insn
);
1222 output_comment(state
, "--- in ---");
1223 FOR_EACH_PTR(state
->inputs
, entry
) {
1224 output_comment(state
, "%s <- %s", show_pseudo(entry
->pseudo
), show_storage(entry
->storage
));
1225 } END_FOR_EACH_PTR(entry
);
1226 output_comment(state
, "--- spill ---");
1227 FOR_EACH_PTR(state
->internal
, entry
) {
1228 output_comment(state
, "%s <-> %s", show_pseudo(entry
->pseudo
), show_storage(entry
->storage
));
1229 } END_FOR_EACH_PTR(entry
);
1230 output_comment(state
, "--- out ---");
1231 FOR_EACH_PTR(state
->outputs
, entry
) {
1232 output_comment(state
, "%s -> %s", show_pseudo(entry
->pseudo
), show_storage(entry
->storage
));
1233 } END_FOR_EACH_PTR(entry
);
/*
 * Emit code for every basic block in "list" that has not yet been
 * emitted in this pass; the per-block "generation" stamp (set by
 * output_bb) prevents revisiting blocks in the cyclic CFG.
 */
1238 static void generate_list(struct basic_block_list
*list
, unsigned long generation
)
1240 struct basic_block
*bb
;
1241 FOR_EACH_PTR(list
, bb
) {
1242 if (bb
->generation
== generation
)
1244 output_bb(bb
, generation
);
1245 } END_FOR_EACH_PTR(bb
);
/*
 * Emit one basic block: stamp it with the current generation (so the
 * mutually-recursive generate_list() won't revisit it), emit all
 * parents first, set up the per-block state (inputs/outputs gathered
 * from the storage allocator, empty internal list, fresh stack
 * offset), generate the code, release the lists, then recurse into
 * the children.
 * NOTE(review): state.internal is not freed in the visible lines —
 * confirm against the full file (a line may be elided here).
 */
1248 static void output_bb(struct basic_block
*bb
, unsigned long generation
)
1250 struct bb_state state
;
1252 bb
->generation
= generation
;
1254 /* Make sure all parents have been generated first */
1255 generate_list(bb
->parents
, generation
);
1257 state
.pos
= bb
->pos
;
1258 state
.inputs
= gather_storage(bb
, STOR_IN
);
1259 state
.outputs
= gather_storage(bb
, STOR_OUT
);
1260 state
.internal
= NULL
;
1261 state
.stack_offset
= 0;
1263 generate(bb
, &state
);
1265 free_ptr_list(&state
.inputs
);
1266 free_ptr_list(&state
.outputs
);
1268 /* Generate all children... */
1269 generate_list(bb
->children
, generation
);
1272 static void set_up_arch_entry(struct entrypoint
*ep
, struct instruction
*entry
)
1278 * We should set up argument sources here..
1280 * Things like "first three arguments in registers" etc
1281 * are all for this place.
1284 FOR_EACH_PTR(entry
->arg_list
, arg
) {
1285 struct storage
*in
= lookup_storage(entry
->bb
, arg
, STOR_IN
);
1287 in
= alloc_storage();
1288 add_storage(in
, entry
->bb
, arg
, STOR_IN
);
1294 in
->type
= REG_FRAME
;
1295 in
->offset
= (i
-3)*4;
1298 } END_FOR_EACH_PTR(arg
);
1302 * Set up storage information for "return"
1304 * Not strictly necessary, since the code generator will
1305 * certainly move the return value to the right register,
1306 * but it can help register allocation if the allocator
1307 * sees that the target register is going to return in %eax.
1309 static void set_up_arch_exit(struct basic_block
*bb
, struct instruction
*ret
)
1311 pseudo_t pseudo
= ret
->src
;
1313 if (pseudo
&& pseudo
!= VOID
) {
1314 struct storage
*out
= lookup_storage(bb
, pseudo
, STOR_OUT
);
1316 out
= alloc_storage();
1317 add_storage(out
, bb
, pseudo
, STOR_OUT
);
1319 out
->type
= REG_REG
;
1325 * Set up dummy/silly output storage information for a switch
1326 * instruction. We need to make sure that a register is available
1327 * when we generate code for switch, so force that by creating
1328 * a dummy output rule.
1330 static void set_up_arch_switch(struct basic_block
*bb
, struct instruction
*insn
)
1332 pseudo_t pseudo
= insn
->cond
;
1333 struct storage
*out
= lookup_storage(bb
, pseudo
, STOR_OUT
);
1335 out
= alloc_storage();
1336 add_storage(out
, bb
, pseudo
, STOR_OUT
);
1338 out
->type
= REG_REG
;
1339 out
->regno
= SWITCH_REG
;
1342 static void arch_set_up_storage(struct entrypoint
*ep
)
1344 struct basic_block
*bb
;
1346 /* Argument storage etc.. */
1347 set_up_arch_entry(ep
, ep
->entry
);
1349 FOR_EACH_PTR(ep
->bbs
, bb
) {
1350 struct instruction
*insn
= last_instruction(bb
->insns
);
1353 switch (insn
->opcode
) {
1355 set_up_arch_exit(bb
, insn
);
1358 set_up_arch_switch(bb
, insn
);
1363 } END_FOR_EACH_PTR(bb
);
1366 static void output(struct entrypoint
*ep
)
1368 unsigned long generation
= ++bb_generation
;
1372 /* Set up initial inter-bb storage links */
1375 /* Architecture-specific storage rules.. */
1376 arch_set_up_storage(ep
);
1378 /* Show the results ... */
1379 output_bb(ep
->entry
->bb
, generation
);
1381 /* Clear the storage hashes for the next function.. */
/*
 * Linearize every symbol in the parsed list into an entrypoint and
 * (in lines elided from this chunk, presumably) run output() on each.
 * NOTE(review): the declaration of "sym", the NULL-ep check and the
 * return value are elided here — confirm against the full file.
 */
1385 static int compile(struct symbol_list
*list
)
1388 FOR_EACH_PTR(list
, sym
) {
1389 struct entrypoint
*ep
;
1391 ep
= linearize_symbol(sym
);
1394 } END_FOR_EACH_PTR(sym
);
1399 int main(int argc
, char **argv
)
1401 return compile(sparse(argc
, argv
));