2 * Example of how to write a compiler with sparse
10 #include "expression.h"
11 #include "linearize.h"
15 static const char* opcodes
[] = {
16 [OP_BADOP
] = "bad_op",
19 [OP_ENTRY
] = "<entry-point>",
24 [OP_SWITCH
] = "switch",
25 [OP_INVOKE
] = "invoke",
26 [OP_COMPUTEDGOTO
] = "jmp *",
27 [OP_UNWIND
] = "unwind",
42 [OP_AND_BOOL
] = "and-bool",
43 [OP_OR_BOOL
] = "or-bool",
45 /* Binary comparison */
46 [OP_SET_EQ
] = "seteq",
47 [OP_SET_NE
] = "setne",
48 [OP_SET_LE
] = "setle",
49 [OP_SET_GE
] = "setge",
50 [OP_SET_LT
] = "setlt",
51 [OP_SET_GT
] = "setgt",
54 [OP_SET_BE
] = "setbe",
55 [OP_SET_AE
] = "setae",
61 /* Special three-input */
65 [OP_MALLOC
] = "malloc",
67 [OP_ALLOCA
] = "alloca",
71 [OP_GET_ELEMENT_PTR
] = "getelem",
75 [OP_PHISOURCE
] = "phisrc",
77 [OP_PTRCAST
] = "ptrcast",
79 [OP_VANEXT
] = "va_next",
80 [OP_VAARG
] = "va_arg",
85 [OP_DEATHNOTE
] = "dead",
88 /* Sparse tagging (line numbers, context, whatever) */
89 [OP_CONTEXT
] = "context",
94 struct pseudo_list
*contains
;
103 /* Our "switch" generation is very very stupid. */
104 #define SWITCH_REG (1)
106 static void output_bb(struct basic_block
*bb
, unsigned long generation
);
109 * We only know about the caller-clobbered registers
112 static struct hardreg hardregs
[] = {
120 #define REGNO (sizeof(hardregs)/sizeof(struct hardreg))
124 unsigned long stack_offset
;
125 struct storage_hash_list
*inputs
;
126 struct storage_hash_list
*outputs
;
127 struct storage_hash_list
*internal
;
130 int cc_opcode
, cc_dead
;
134 static struct storage_hash
*find_storage_hash(pseudo_t pseudo
, struct storage_hash_list
*list
)
136 struct storage_hash
*entry
;
137 FOR_EACH_PTR(list
, entry
) {
138 if (entry
->pseudo
== pseudo
)
140 } END_FOR_EACH_PTR(entry
);
144 static struct storage_hash
*find_or_create_hash(pseudo_t pseudo
, struct storage_hash_list
**listp
)
146 struct storage_hash
*entry
;
148 entry
= find_storage_hash(pseudo
, *listp
);
150 entry
= alloc_storage_hash(alloc_storage());
151 entry
->pseudo
= pseudo
;
152 add_ptr_list(listp
, entry
);
157 /* Eventually we should just build it up in memory */
158 static void output_line(struct bb_state
*state
, const char *fmt
, ...)
167 static void output_label(struct bb_state
*state
, const char *fmt
, ...)
169 static char buffer
[512];
173 vsnprintf(buffer
, sizeof(buffer
), fmt
, args
);
176 output_line(state
, "%s:\n", buffer
);
179 static void output_insn(struct bb_state
*state
, const char *fmt
, ...)
181 static char buffer
[512];
185 vsnprintf(buffer
, sizeof(buffer
), fmt
, args
);
188 output_line(state
, "\t%s\n", buffer
);
191 static void output_comment(struct bb_state
*state
, const char *fmt
, ...)
193 static char buffer
[512];
199 vsnprintf(buffer
, sizeof(buffer
), fmt
, args
);
202 output_line(state
, "\t# %s\n", buffer
);
205 static const char *show_memop(struct storage
*storage
)
207 static char buffer
[1000];
211 switch (storage
->type
) {
213 sprintf(buffer
, "%d(FP)", storage
->offset
);
216 sprintf(buffer
, "%d(SP)", storage
->offset
);
219 return hardregs
[storage
->regno
].name
;
221 return show_storage(storage
);
226 static void alloc_stack(struct bb_state
*state
, struct storage
*storage
)
228 storage
->type
= REG_STACK
;
229 storage
->offset
= state
->stack_offset
;
230 state
->stack_offset
+= 4;
234 * Can we re-generate the pseudo, so that we don't need to
235 * flush it to memory? We can regenerate:
236 * - immediates and symbol addresses
237 * - pseudos we got as input in non-registers
238 * - pseudos we've already saved off earlier..
240 static int can_regenerate(struct bb_state
*state
, pseudo_t pseudo
)
242 struct storage_hash
*in
;
244 switch (pseudo
->type
) {
250 in
= find_storage_hash(pseudo
, state
->inputs
);
251 if (in
&& in
->storage
->type
!= REG_REG
)
253 in
= find_storage_hash(pseudo
, state
->internal
);
260 static void flush_one_pseudo(struct bb_state
*state
, struct hardreg
*hardreg
, pseudo_t pseudo
)
262 struct storage_hash
*out
;
263 struct storage
*storage
;
265 if (can_regenerate(state
, pseudo
))
268 output_comment(state
, "flushing %s from %s", show_pseudo(pseudo
), hardreg
->name
);
269 out
= find_storage_hash(pseudo
, state
->internal
);
271 out
= find_storage_hash(pseudo
, state
->outputs
);
273 out
= find_or_create_hash(pseudo
, &state
->internal
);
275 storage
= out
->storage
;
276 switch (storage
->type
) {
279 * Aieee - the next user wants it in a register, but we
280 * need to flush it to memory in between. Which means that
281 * we need to allocate an internal one, dammit..
283 out
= find_or_create_hash(pseudo
, &state
->internal
);
284 storage
= out
->storage
;
287 alloc_stack(state
, storage
);
290 output_insn(state
, "movl %s,%s", hardreg
->name
, show_memop(storage
));
295 /* Flush a hardreg out to the storage it has.. */
296 static void flush_reg(struct bb_state
*state
, struct hardreg
*hardreg
)
305 FOR_EACH_PTR(hardreg
->contains
, pseudo
) {
306 if (CURRENT_TAG(pseudo
) & TAG_DEAD
)
308 if (!(CURRENT_TAG(pseudo
) & TAG_DIRTY
))
310 flush_one_pseudo(state
, hardreg
, pseudo
);
311 } END_FOR_EACH_PTR(pseudo
);
312 free_ptr_list(&hardreg
->contains
);
315 static struct storage_hash
*find_pseudo_storage(struct bb_state
*state
, pseudo_t pseudo
, struct hardreg
*reg
)
317 struct storage_hash
*src
;
319 src
= find_storage_hash(pseudo
, state
->internal
);
321 src
= find_storage_hash(pseudo
, state
->inputs
);
323 src
= find_storage_hash(pseudo
, state
->outputs
);
324 /* Undefined? Screw it! */
329 * If we found output storage, it had better be local stack
330 * that we flushed to earlier..
332 if (src
->storage
->type
!= REG_STACK
)
338 * Incoming pseudo without any pre-set storage allocation?
339 * We can make up our own, and obviously prefer to get it
340 * in the register we already selected (if it hasn't been
343 if (src
->storage
->type
== REG_UDEF
) {
344 if (reg
&& !reg
->used
) {
345 src
->storage
->type
= REG_REG
;
346 src
->storage
->regno
= reg
- hardregs
;
348 alloc_stack(state
, src
->storage
);
353 static void mark_reg_dead(struct bb_state
*state
, pseudo_t pseudo
, struct hardreg
*reg
)
357 FOR_EACH_PTR(reg
->contains
, p
) {
360 if (CURRENT_TAG(p
) & TAG_DEAD
)
362 output_comment(state
, "marking pseudo %s in reg %s dead", show_pseudo(pseudo
), reg
->name
);
363 TAG_CURRENT(p
, TAG_DEAD
);
365 } END_FOR_EACH_PTR(p
);
368 static void add_pseudo_reg(struct bb_state
*state
, pseudo_t pseudo
, struct hardreg
*reg
)
370 output_comment(state
, "added pseudo %s to reg %s", show_pseudo(pseudo
), reg
->name
);
371 add_ptr_list_tag(®
->contains
, pseudo
, TAG_DIRTY
);
377 static struct hardreg
*preferred_reg(struct bb_state
*state
, pseudo_t target
)
379 struct storage_hash
*dst
;
381 dst
= find_storage_hash(target
, state
->outputs
);
383 struct storage
*storage
= dst
->storage
;
384 if (storage
->type
== REG_REG
)
385 return hardregs
+ storage
->regno
;
390 static struct hardreg
*empty_reg(struct bb_state
*state
)
393 struct hardreg
*reg
= hardregs
;
395 for (i
= 0; i
< REGNO
; i
++, reg
++) {
402 static struct hardreg
*target_reg(struct bb_state
*state
, pseudo_t pseudo
, pseudo_t target
)
407 /* First, see if we have a preferred target register.. */
408 reg
= preferred_reg(state
, target
);
409 if (reg
&& !reg
->busy
)
412 reg
= empty_reg(state
);
421 flush_reg(state
, reg
);
424 add_pseudo_reg(state
, pseudo
, reg
);
428 static struct hardreg
*find_in_reg(struct bb_state
*state
, pseudo_t pseudo
)
433 for (i
= 0; i
< REGNO
; i
++) {
437 FOR_EACH_PTR(reg
->contains
, p
) {
440 output_comment(state
, "found pseudo %s in reg %s", show_pseudo(pseudo
), reg
->name
);
443 } END_FOR_EACH_PTR(p
);
448 static void flush_cc_cache_to_reg(struct bb_state
*state
, pseudo_t pseudo
, struct hardreg
*reg
)
450 int opcode
= state
->cc_opcode
;
452 state
->cc_opcode
= 0;
453 state
->cc_target
= NULL
;
454 output_insn(state
, "%s %s", opcodes
[opcode
], reg
->name
);
457 static void flush_cc_cache(struct bb_state
*state
)
459 pseudo_t pseudo
= state
->cc_target
;
464 state
->cc_target
= NULL
;
466 if (!state
->cc_dead
) {
467 dst
= target_reg(state
, pseudo
, pseudo
);
468 flush_cc_cache_to_reg(state
, pseudo
, dst
);
473 static void add_cc_cache(struct bb_state
*state
, int opcode
, pseudo_t pseudo
)
475 assert(!state
->cc_target
);
476 state
->cc_target
= pseudo
;
477 state
->cc_opcode
= opcode
;
479 output_comment(state
, "caching %s", opcodes
[opcode
]);
482 /* Fill a hardreg with the pseudo it has */
483 static struct hardreg
*fill_reg(struct bb_state
*state
, struct hardreg
*hardreg
, pseudo_t pseudo
)
485 struct storage_hash
*src
;
486 struct instruction
*def
;
488 if (state
->cc_target
== pseudo
) {
489 flush_cc_cache_to_reg(state
, pseudo
, hardreg
);
493 switch (pseudo
->type
) {
495 output_insn(state
, "movl $%lld,%s", pseudo
->value
, hardreg
->name
);
498 output_insn(state
, "movl $<%s>,%s", show_pseudo(pseudo
), hardreg
->name
);
503 if (def
->opcode
== OP_SETVAL
) {
504 output_insn(state
, "movl $<%s>,%s", show_pseudo(def
->symbol
), hardreg
->name
);
507 src
= find_pseudo_storage(state
, pseudo
, hardreg
);
510 if (src
->flags
& TAG_DEAD
)
511 mark_reg_dead(state
, pseudo
, hardreg
);
512 output_insn(state
, "mov.%d %s,%s", 32, show_memop(src
->storage
), hardreg
->name
);
515 output_insn(state
, "reload %s from %s", hardreg
->name
, show_pseudo(pseudo
));
521 static struct hardreg
*getreg(struct bb_state
*state
, pseudo_t pseudo
, pseudo_t target
)
525 reg
= find_in_reg(state
, pseudo
);
528 reg
= target_reg(state
, pseudo
, target
);
529 return fill_reg(state
, reg
, pseudo
);
532 static struct hardreg
*copy_reg(struct bb_state
*state
, struct hardreg
*src
, pseudo_t target
)
540 reg
= preferred_reg(state
, target
);
541 if (reg
&& !reg
->busy
) {
542 output_comment(state
, "copying %s to preferred target %s", show_pseudo(target
), reg
->name
);
543 output_insn(state
, "movl %s,%s", src
->name
, reg
->name
);
547 for (i
= 0; i
< REGNO
; i
++) {
548 struct hardreg
*reg
= hardregs
+ i
;
550 output_comment(state
, "copying %s to %s", show_pseudo(target
), reg
->name
);
551 output_insn(state
, "movl %s,%s", src
->name
, reg
->name
);
556 flush_reg(state
, src
);
560 static const char *generic(struct bb_state
*state
, pseudo_t pseudo
)
563 struct storage_hash
*src
;
565 switch (pseudo
->type
) {
568 return show_pseudo(pseudo
);
570 reg
= find_in_reg(state
, pseudo
);
573 src
= find_pseudo_storage(state
, pseudo
, NULL
);
576 return show_memop(src
->storage
);
580 static const char *address(struct bb_state
*state
, struct instruction
*memop
)
583 struct hardreg
*base
;
584 static char buffer
[100];
585 pseudo_t addr
= memop
->src
;
590 if (sym
->ctype
.modifiers
& MOD_NONLOCAL
) {
591 sprintf(buffer
, "%s+%d", show_ident(sym
->ident
), memop
->offset
);
594 sprintf(buffer
, "%d+%s(SP)", memop
->offset
, show_ident(sym
->ident
));
597 base
= getreg(state
, addr
, NULL
);
598 sprintf(buffer
, "%d(%s)", memop
->offset
, base
->name
);
603 static const char *reg_or_imm(struct bb_state
*state
, pseudo_t pseudo
)
605 switch(pseudo
->type
) {
607 return show_pseudo(pseudo
);
609 return getreg(state
, pseudo
, NULL
)->name
;
613 static void kill_dead_reg(struct hardreg
*reg
)
618 FOR_EACH_PTR(reg
->contains
, p
) {
619 if (CURRENT_TAG(p
) & TAG_DEAD
) {
620 DELETE_CURRENT_PTR(p
);
624 } END_FOR_EACH_PTR(p
);
625 PACK_PTR_LIST(®
->contains
);
630 static struct hardreg
*target_copy_reg(struct bb_state
*state
, struct hardreg
*src
, pseudo_t target
)
633 return copy_reg(state
, src
, target
);
636 static void do_binop(struct bb_state
*state
, struct instruction
*insn
, pseudo_t val1
, pseudo_t val2
)
638 const char *op
= opcodes
[insn
->opcode
];
639 struct hardreg
*src
= getreg(state
, val1
, insn
->target
);
640 const char *src2
= generic(state
, val2
);
643 dst
= target_copy_reg(state
, src
, insn
->target
);
644 output_insn(state
, "%s.%d %s,%s", op
, insn
->size
, src2
, dst
->name
);
645 add_pseudo_reg(state
, insn
->target
, dst
);
648 static void generate_binop(struct bb_state
*state
, struct instruction
*insn
)
650 flush_cc_cache(state
);
651 do_binop(state
, insn
, insn
->src1
, insn
->src2
);
654 static int is_dead_reg(struct bb_state
*state
, pseudo_t pseudo
, struct hardreg
*reg
)
657 FOR_EACH_PTR(reg
->contains
, p
) {
659 return CURRENT_TAG(p
) & TAG_DEAD
;
660 } END_FOR_EACH_PTR(p
);
665 * Commutative binops are much more flexible, since we can switch the
666 * sources around to satisfy the target register, or to avoid having
667 * to load one of them into a register..
669 static void generate_commutative_binop(struct bb_state
*state
, struct instruction
*insn
)
672 struct hardreg
*reg1
, *reg2
;
674 flush_cc_cache(state
);
677 reg2
= find_in_reg(state
, src2
);
680 reg1
= find_in_reg(state
, src1
);
683 if (!is_dead_reg(state
, src2
, reg2
))
685 if (!is_dead_reg(state
, src1
, reg1
))
688 /* Both are dead. Is one preferable? */
689 if (reg2
!= preferred_reg(state
, insn
->target
))
696 do_binop(state
, insn
, src1
, src2
);
700 * This marks a pseudo dead. It still stays on the hardreg list (the hardreg
701 * still has its value), but it's scheduled to be killed after the next
702 * "sequence point" when we call "kill_dead_pseudos()"
704 static void mark_pseudo_dead(struct bb_state
*state
, pseudo_t pseudo
)
707 struct storage_hash
*src
;
709 if (state
->cc_target
== pseudo
)
711 src
= find_pseudo_storage(state
, pseudo
, NULL
);
713 src
->flags
|= TAG_DEAD
;
714 for (i
= 0; i
< REGNO
; i
++)
715 mark_reg_dead(state
, pseudo
, hardregs
+ i
);
718 static void kill_dead_pseudos(struct bb_state
*state
)
722 for (i
= 0; i
< REGNO
; i
++) {
723 kill_dead_reg(hardregs
+ i
);
728 * A PHI source can define a pseudo that we already
729 * have in another register. We need to invalidate the
730 * old register so that we don't end up with the same
731 * pseudo in "two places".
733 static void remove_pseudo_reg(struct bb_state
*state
, pseudo_t pseudo
)
737 output_comment(state
, "pseudo %s died", show_pseudo(pseudo
));
738 for (i
= 0; i
< REGNO
; i
++) {
739 struct hardreg
*reg
= hardregs
+ i
;
741 FOR_EACH_PTR(reg
->contains
, p
) {
744 if (CURRENT_TAG(p
) & TAG_DEAD
)
747 DELETE_CURRENT_PTR(p
);
748 output_comment(state
, "removed pseudo %s from reg %s", show_pseudo(pseudo
), reg
->name
);
749 } END_FOR_EACH_PTR(p
);
750 PACK_PTR_LIST(®
->contains
);
754 static void generate_store(struct instruction
*insn
, struct bb_state
*state
)
756 output_insn(state
, "mov.%d %s,%s", insn
->size
, reg_or_imm(state
, insn
->target
), address(state
, insn
));
759 static void generate_load(struct instruction
*insn
, struct bb_state
*state
)
761 const char *input
= address(state
, insn
);
764 kill_dead_pseudos(state
);
765 dst
= target_reg(state
, insn
->target
, NULL
);
766 output_insn(state
, "mov.%d %s,%s", insn
->size
, input
, dst
->name
);
769 static void generate_phisource(struct instruction
*insn
, struct bb_state
*state
)
771 struct instruction
*user
;
774 /* Mark all the target pseudos dead first */
775 FOR_EACH_PTR(insn
->phi_users
, user
) {
776 mark_pseudo_dead(state
, user
->target
);
777 } END_FOR_EACH_PTR(user
);
780 FOR_EACH_PTR(insn
->phi_users
, user
) {
782 reg
= getreg(state
, insn
->phi_src
, user
->target
);
783 remove_pseudo_reg(state
, user
->target
);
784 add_pseudo_reg(state
, user
->target
, reg
);
785 } END_FOR_EACH_PTR(user
);
788 static void generate_cast(struct bb_state
*state
, struct instruction
*insn
)
790 struct hardreg
*src
= getreg(state
, insn
->src
, insn
->target
);
792 unsigned int old
= insn
->orig_type
? insn
->orig_type
->bit_size
: 0;
793 unsigned int new = insn
->size
;
796 * Cast to smaller type? Ignore the high bits, we
797 * just keep both pseudos in the same register.
800 add_pseudo_reg(state
, insn
->target
, src
);
804 dst
= target_copy_reg(state
, src
, insn
->target
);
806 if (insn
->orig_type
&& (insn
->orig_type
->ctype
.modifiers
& MOD_SIGNED
)) {
807 output_insn(state
, "sext.%d.%d %s", old
, new, dst
->name
);
809 unsigned long long mask
;
810 mask
= ~(~0ULL << old
);
811 mask
&= ~(~0ULL << new);
812 output_insn(state
, "andl.%d $%#llx,%s", insn
->size
, mask
, dst
->name
);
814 add_pseudo_reg(state
, insn
->target
, dst
);
817 static void generate_output_storage(struct bb_state
*state
);
819 static void generate_branch(struct bb_state
*state
, struct instruction
*br
)
821 const char *branch
= "jXXX";
822 struct basic_block
*target
;
825 if (state
->cc_target
== br
->cond
) {
826 static const char *branches
[] = {
838 branch
= branches
[state
->cc_opcode
];
840 struct hardreg
*reg
= getreg(state
, br
->cond
, NULL
);
841 output_insn(state
, "testl %s,%s", reg
->name
, reg
->name
);
845 generate_output_storage(state
);
846 target
= br
->bb_true
;
848 output_insn(state
, "%s .L%p", branch
, target
);
849 target
= br
->bb_false
;
851 output_insn(state
, "jmp .L%p", target
);
854 /* We've made sure that there is a dummy reg live for the output */
855 static void generate_switch(struct bb_state
*state
, struct instruction
*insn
)
857 struct hardreg
*reg
= hardregs
+ SWITCH_REG
;
859 generate_output_storage(state
);
860 output_insn(state
, "switch on %s", reg
->name
);
861 output_insn(state
, "unimplemented: %s", show_instruction(insn
));
864 static void generate_ret(struct bb_state
*state
, struct instruction
*ret
)
866 if (ret
->src
&& ret
->src
!= VOID
) {
867 struct hardreg
*wants
= hardregs
+0;
868 struct hardreg
*reg
= getreg(state
, ret
->src
, NULL
);
870 output_insn(state
, "movl %s,%s", reg
->name
, wants
->name
);
872 output_insn(state
, "ret");
876 * Fake "call" linearization just as a taster..
878 static void generate_call(struct bb_state
*state
, struct instruction
*insn
)
883 FOR_EACH_PTR(insn
->arguments
, arg
) {
884 output_insn(state
, "pushl %s", generic(state
, arg
));
886 } END_FOR_EACH_PTR(arg
);
887 flush_reg(state
, hardregs
+0);
888 flush_reg(state
, hardregs
+1);
889 flush_reg(state
, hardregs
+2);
890 output_insn(state
, "call %s", show_pseudo(insn
->func
));
892 output_insn(state
, "addl $%d,%%esp", offset
);
893 add_pseudo_reg(state
, insn
->target
, hardregs
+0);
896 static void generate_select(struct bb_state
*state
, struct instruction
*insn
)
898 struct hardreg
*src1
, *src2
, *cond
, *dst
;
900 cond
= getreg(state
, insn
->src1
, NULL
);
901 output_insn(state
, "testl %s,%s", cond
->name
, cond
->name
);
903 src1
= getreg(state
, insn
->src2
, NULL
);
904 dst
= copy_reg(state
, src1
, insn
->target
);
905 add_pseudo_reg(state
, insn
->target
, dst
);
906 src2
= getreg(state
, insn
->src3
, insn
->target
);
907 output_insn(state
, "sele %s,%s", src2
->name
, dst
->name
);
910 static void generate_asm_inputs(struct bb_state
*state
, struct asm_constraint_list
*list
)
912 struct asm_constraint
*entry
;
914 FOR_EACH_PTR(list
, entry
) {
915 const char *constraint
= entry
->constraint
;
916 pseudo_t pseudo
= entry
->pseudo
;
918 output_insn(state
, "# asm input \"%s\": %s", constraint
, show_pseudo(pseudo
));
919 } END_FOR_EACH_PTR(entry
);
922 static void generate_asm_outputs(struct bb_state
*state
, struct asm_constraint_list
*list
)
924 struct asm_constraint
*entry
;
926 FOR_EACH_PTR(list
, entry
) {
927 const char *constraint
= entry
->constraint
;
928 pseudo_t pseudo
= entry
->pseudo
;
930 while (*constraint
== '=' || *constraint
== '+')
933 output_insn(state
, "# asm output \"%s\": %s", constraint
, show_pseudo(pseudo
));
934 } END_FOR_EACH_PTR(entry
);
937 static void generate_asm(struct bb_state
*state
, struct instruction
*insn
)
939 generate_asm_inputs(state
, insn
->asm_rules
->inputs
);
940 output_insn(state
, "ASM: %s", insn
->string
);
941 generate_asm_outputs(state
, insn
->asm_rules
->outputs
);
944 static void generate_compare(struct bb_state
*state
, struct instruction
*insn
)
950 flush_cc_cache(state
);
951 opcode
= insn
->opcode
;
954 * We should try to switch these around if necessary,
955 * and update the opcode to match..
957 src
= getreg(state
, insn
->src1
, insn
->target
);
958 src2
= generic(state
, insn
->src2
);
960 output_insn(state
, "cmp.%d %s,%s", insn
->size
, src2
, src
->name
);
962 add_cc_cache(state
, opcode
, insn
->target
);
965 static void generate_one_insn(struct instruction
*insn
, struct bb_state
*state
)
968 output_comment(state
, "%s", show_instruction(insn
));
970 switch (insn
->opcode
) {
972 struct symbol
*sym
= insn
->bb
->ep
->name
;
973 const char *name
= show_ident(sym
->ident
);
974 if (sym
->ctype
.modifiers
& MOD_STATIC
)
975 printf("\n\n%s:\n", name
);
977 printf("\n\n.globl %s\n%s:\n", name
, name
);
982 * OP_PHI doesn't actually generate any code. It has been
983 * done by the storage allocator and the OP_PHISOURCE.
989 generate_phisource(insn
, state
);
993 * OP_SETVAL likewise doesn't actually generate any
994 * code. On use, the "def" of the pseudo will be
1001 generate_store(insn
, state
);
1005 generate_load(insn
, state
);
1009 mark_pseudo_dead(state
, insn
->target
);
1012 case OP_ADD
: case OP_MUL
:
1013 case OP_AND
: case OP_OR
: case OP_XOR
:
1014 case OP_AND_BOOL
: case OP_OR_BOOL
:
1015 generate_commutative_binop(state
, insn
);
1018 case OP_SUB
: case OP_DIV
: case OP_MOD
:
1019 case OP_SHL
: case OP_SHR
:
1020 generate_binop(state
, insn
);
1023 case OP_BINCMP
... OP_BINCMP_END
:
1024 generate_compare(state
, insn
);
1027 case OP_CAST
: case OP_PTRCAST
:
1028 generate_cast(state
, insn
);
1032 generate_select(state
, insn
);
1036 generate_branch(state
, insn
);
1040 generate_switch(state
, insn
);
1044 generate_call(state
, insn
);
1048 generate_ret(state
, insn
);
1052 generate_asm(state
, insn
);
1056 output_insn(state
, "unimplemented: %s", show_instruction(insn
));
1059 kill_dead_pseudos(state
);
1062 #define VERY_BUSY 1000
1063 #define REG_FIXED 2000
1065 static void write_reg_to_storage(struct bb_state
*state
, struct hardreg
*reg
, pseudo_t pseudo
, struct storage
*storage
)
1068 struct hardreg
*out
;
1070 switch (storage
->type
) {
1072 out
= hardregs
+ storage
->regno
;
1075 output_insn(state
, "movl %s,%s", reg
->name
, out
->name
);
1078 if (reg
->busy
< VERY_BUSY
) {
1079 storage
->type
= REG_REG
;
1080 storage
->regno
= reg
- hardregs
;
1081 reg
->busy
= REG_FIXED
;
1085 /* Try to find a non-busy register.. */
1086 for (i
= 0; i
< REGNO
; i
++) {
1090 output_insn(state
, "movl %s,%s", reg
->name
, out
->name
);
1091 storage
->type
= REG_REG
;
1093 reg
->busy
= REG_FIXED
;
1097 /* Fall back on stack allocation ... */
1098 alloc_stack(state
, storage
);
1101 output_insn(state
, "movl %s,%s", reg
->name
, show_memop(storage
));
1106 static void write_val_to_storage(struct bb_state
*state
, pseudo_t src
, struct storage
*storage
)
1108 struct hardreg
*out
;
1110 switch (storage
->type
) {
1112 alloc_stack(state
, storage
);
1114 output_insn(state
, "movl %s,%s", show_pseudo(src
), show_memop(storage
));
1117 out
= hardregs
+ storage
->regno
;
1118 output_insn(state
, "movl %s,%s", show_pseudo(src
), out
->name
);
1122 static void fill_output(struct bb_state
*state
, pseudo_t pseudo
, struct storage
*out
)
1125 struct storage_hash
*in
;
1126 struct instruction
*def
;
1128 /* Is that pseudo a constant value? */
1129 switch (pseudo
->type
) {
1131 write_val_to_storage(state
, pseudo
, out
);
1135 if (def
->opcode
== OP_SETVAL
) {
1136 write_val_to_storage(state
, def
->symbol
, out
);
1143 /* See if we have that pseudo in a register.. */
1144 for (i
= 0; i
< REGNO
; i
++) {
1145 struct hardreg
*reg
= hardregs
+ i
;
1148 FOR_EACH_PTR(reg
->contains
, p
) {
1150 write_reg_to_storage(state
, reg
, pseudo
, out
);
1153 } END_FOR_EACH_PTR(p
);
1156 /* Do we have it in another storage? */
1157 in
= find_storage_hash(pseudo
, state
->internal
);
1159 in
= find_storage_hash(pseudo
, state
->inputs
);
1164 switch (out
->type
) {
1166 *out
= *in
->storage
;
1169 output_insn(state
, "movl %s,%s", show_memop(in
->storage
), hardregs
[out
->regno
].name
);
1172 if (out
== in
->storage
)
1174 if (out
->type
== in
->storage
->type
== out
->regno
== in
->storage
->regno
)
1176 output_insn(state
, "movl %s,%s", show_memop(in
->storage
), show_memop(out
));
1182 static int final_pseudo_flush(struct bb_state
*state
, pseudo_t pseudo
, struct hardreg
*reg
)
1184 struct storage_hash
*hash
;
1185 struct storage
*out
;
1186 struct hardreg
*dst
;
1189 * Since this pseudo is live at exit, we'd better have output
1192 hash
= find_storage_hash(pseudo
, state
->outputs
);
1195 out
= hash
->storage
;
1197 /* If the output is in a register, try to get it there.. */
1198 if (out
->type
== REG_REG
) {
1199 dst
= hardregs
+ out
->regno
;
1201 * Two good cases: nobody is using the right register,
1202 * or we've already set it aside for output..
1204 if (!dst
->busy
|| dst
->busy
> VERY_BUSY
)
1207 /* Aiee. Try to keep it in a register.. */
1208 dst
= empty_reg(state
);
1215 /* If the output is undefined, let's see if we can put it in a register.. */
1216 if (out
->type
== REG_UDEF
) {
1217 dst
= empty_reg(state
);
1219 out
->type
= REG_REG
;
1220 out
->regno
= dst
- hardregs
;
1223 /* Uhhuh. Not so good. No empty registers right now */
1227 /* If we know we need to flush it, just do so already .. */
1228 output_insn(state
, "movl %s,%s", reg
->name
, show_memop(out
));
1234 output_insn(state
, "movl %s,%s", reg
->name
, dst
->name
);
1235 add_pseudo_reg(state
, pseudo
, dst
);
1240 * This tries to make sure that we put all the pseudos that are
1241 * live on exit into the proper storage
1243 static void generate_output_storage(struct bb_state
*state
)
1245 struct storage_hash
*entry
;
1247 /* Go through the fixed outputs, making sure we have those regs free */
1248 FOR_EACH_PTR(state
->outputs
, entry
) {
1249 struct storage
*out
= entry
->storage
;
1250 if (out
->type
== REG_REG
) {
1251 struct hardreg
*reg
= hardregs
+ out
->regno
;
1255 reg
->busy
= REG_FIXED
;
1256 FOR_EACH_PTR(reg
->contains
, p
) {
1257 if (p
== entry
->pseudo
) {
1261 if (CURRENT_TAG(p
) & TAG_DEAD
)
1264 /* Try to write back the pseudo to where it should go ... */
1265 if (final_pseudo_flush(state
, p
, reg
)) {
1266 DELETE_CURRENT_PTR(p
);
1271 } END_FOR_EACH_PTR(p
);
1272 PACK_PTR_LIST(®
->contains
);
1274 flush_reg(state
, reg
);
1276 } END_FOR_EACH_PTR(entry
);
1278 FOR_EACH_PTR(state
->outputs
, entry
) {
1279 fill_output(state
, entry
->pseudo
, entry
->storage
);
1280 } END_FOR_EACH_PTR(entry
);
1283 static void generate(struct basic_block
*bb
, struct bb_state
*state
)
1286 struct storage_hash
*entry
;
1287 struct instruction
*insn
;
1289 for (i
= 0; i
< REGNO
; i
++) {
1290 free_ptr_list(&hardregs
[i
].contains
);
1291 hardregs
[i
].busy
= 0;
1292 hardregs
[i
].dead
= 0;
1293 hardregs
[i
].used
= 0;
1296 FOR_EACH_PTR(state
->inputs
, entry
) {
1297 struct storage
*storage
= entry
->storage
;
1298 const char *name
= show_storage(storage
);
1299 output_comment(state
, "incoming %s in %s", show_pseudo(entry
->pseudo
), name
);
1300 if (storage
->type
== REG_REG
) {
1301 int regno
= storage
->regno
;
1302 add_pseudo_reg(state
, entry
->pseudo
, hardregs
+ regno
);
1303 name
= hardregs
[regno
].name
;
1305 } END_FOR_EACH_PTR(entry
);
1307 output_label(state
, ".L%p", bb
);
1308 FOR_EACH_PTR(bb
->insns
, insn
) {
1311 generate_one_insn(insn
, state
);
1312 } END_FOR_EACH_PTR(insn
);
1315 output_comment(state
, "--- in ---");
1316 FOR_EACH_PTR(state
->inputs
, entry
) {
1317 output_comment(state
, "%s <- %s", show_pseudo(entry
->pseudo
), show_storage(entry
->storage
));
1318 } END_FOR_EACH_PTR(entry
);
1319 output_comment(state
, "--- spill ---");
1320 FOR_EACH_PTR(state
->internal
, entry
) {
1321 output_comment(state
, "%s <-> %s", show_pseudo(entry
->pseudo
), show_storage(entry
->storage
));
1322 } END_FOR_EACH_PTR(entry
);
1323 output_comment(state
, "--- out ---");
1324 FOR_EACH_PTR(state
->outputs
, entry
) {
1325 output_comment(state
, "%s -> %s", show_pseudo(entry
->pseudo
), show_storage(entry
->storage
));
1326 } END_FOR_EACH_PTR(entry
);
1331 static void generate_list(struct basic_block_list
*list
, unsigned long generation
)
1333 struct basic_block
*bb
;
1334 FOR_EACH_PTR(list
, bb
) {
1335 if (bb
->generation
== generation
)
1337 output_bb(bb
, generation
);
1338 } END_FOR_EACH_PTR(bb
);
1341 static void output_bb(struct basic_block
*bb
, unsigned long generation
)
1343 struct bb_state state
;
1345 bb
->generation
= generation
;
1347 /* Make sure all parents have been generated first */
1348 generate_list(bb
->parents
, generation
);
1350 state
.pos
= bb
->pos
;
1351 state
.inputs
= gather_storage(bb
, STOR_IN
);
1352 state
.outputs
= gather_storage(bb
, STOR_OUT
);
1353 state
.internal
= NULL
;
1354 state
.stack_offset
= 0;
1355 state
.cc_opcode
= 0;
1356 state
.cc_target
= NULL
;
1358 generate(bb
, &state
);
1360 free_ptr_list(&state
.inputs
);
1361 free_ptr_list(&state
.outputs
);
1363 /* Generate all children... */
1364 generate_list(bb
->children
, generation
);
1367 static void set_up_arch_entry(struct entrypoint
*ep
, struct instruction
*entry
)
1373 * We should set up argument sources here..
1375 * Things like "first three arguments in registers" etc
1376 * are all for this place.
1379 FOR_EACH_PTR(entry
->arg_list
, arg
) {
1380 struct storage
*in
= lookup_storage(entry
->bb
, arg
, STOR_IN
);
1382 in
= alloc_storage();
1383 add_storage(in
, entry
->bb
, arg
, STOR_IN
);
1389 in
->type
= REG_FRAME
;
1390 in
->offset
= (i
-3)*4;
1393 } END_FOR_EACH_PTR(arg
);
1397 * Set up storage information for "return"
1399 * Not strictly necessary, since the code generator will
1400 * certainly move the return value to the right register,
1401 * but it can help register allocation if the allocator
1402 * sees that the target register is going to return in %eax.
1404 static void set_up_arch_exit(struct basic_block
*bb
, struct instruction
*ret
)
1406 pseudo_t pseudo
= ret
->src
;
1408 if (pseudo
&& pseudo
!= VOID
) {
1409 struct storage
*out
= lookup_storage(bb
, pseudo
, STOR_OUT
);
1411 out
= alloc_storage();
1412 add_storage(out
, bb
, pseudo
, STOR_OUT
);
1414 out
->type
= REG_REG
;
1420 * Set up dummy/silly output storage information for a switch
1421 * instruction. We need to make sure that a register is available
1422 * when we generate code for switch, so force that by creating
1423 * a dummy output rule.
1425 static void set_up_arch_switch(struct basic_block
*bb
, struct instruction
*insn
)
1427 pseudo_t pseudo
= insn
->cond
;
1428 struct storage
*out
= lookup_storage(bb
, pseudo
, STOR_OUT
);
1430 out
= alloc_storage();
1431 add_storage(out
, bb
, pseudo
, STOR_OUT
);
1433 out
->type
= REG_REG
;
1434 out
->regno
= SWITCH_REG
;
1437 static void arch_set_up_storage(struct entrypoint
*ep
)
1439 struct basic_block
*bb
;
1441 /* Argument storage etc.. */
1442 set_up_arch_entry(ep
, ep
->entry
);
1444 FOR_EACH_PTR(ep
->bbs
, bb
) {
1445 struct instruction
*insn
= last_instruction(bb
->insns
);
1448 switch (insn
->opcode
) {
1450 set_up_arch_exit(bb
, insn
);
1453 set_up_arch_switch(bb
, insn
);
1458 } END_FOR_EACH_PTR(bb
);
1461 static void output(struct entrypoint
*ep
)
1463 unsigned long generation
= ++bb_generation
;
1467 /* Set up initial inter-bb storage links */
1470 /* Architecture-specific storage rules.. */
1471 arch_set_up_storage(ep
);
1473 /* Show the results ... */
1474 output_bb(ep
->entry
->bb
, generation
);
1476 /* Clear the storage hashes for the next function.. */
1480 static int compile(struct symbol_list
*list
)
1483 FOR_EACH_PTR(list
, sym
) {
1484 struct entrypoint
*ep
;
1486 ep
= linearize_symbol(sym
);
1489 } END_FOR_EACH_PTR(sym
);
1494 int main(int argc
, char **argv
)
1496 return compile(sparse(argc
, argv
));