/*
 * Example of how to write a compiler with sparse
 */
#include <stdio.h>
#include <stdarg.h>
#include <string.h>
#include <assert.h>

#include "expression.h"
#include "linearize.h"
#include "storage.h"
static const char *opcodes[] = {
	[OP_BADOP] = "bad_op",
	[OP_ENTRY] = "<entry-point>",
	[OP_SWITCH] = "switch",
	[OP_INVOKE] = "invoke",
	[OP_COMPUTEDGOTO] = "jmp *",
	[OP_UNWIND] = "unwind",
	[OP_AND_BOOL] = "and-bool",
	[OP_OR_BOOL] = "or-bool",

	/* Binary comparison */
	[OP_SET_EQ] = "seteq",
	[OP_SET_NE] = "setne",
	[OP_SET_LE] = "setle",
	[OP_SET_GE] = "setge",
	[OP_SET_LT] = "setlt",
	[OP_SET_GT] = "setgt",
	[OP_SET_BE] = "setbe",
	[OP_SET_AE] = "setae",

	/* Special three-input */
	[OP_MALLOC] = "malloc",
	[OP_ALLOCA] = "alloca",
	[OP_GET_ELEMENT_PTR] = "getelem",
	[OP_PHISOURCE] = "phisrc",
	[OP_PTRCAST] = "ptrcast",
	[OP_VANEXT] = "va_next",
	[OP_VAARG] = "va_arg",
	[OP_DEATHNOTE] = "dead",

	/* Sparse tagging (line numbers, context, whatever) */
	[OP_CONTEXT] = "context",
};
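/*
 * These strings are only used as mnemonics when emitting the fake
 * "assembly": do_binop() and flush_cc_cache_to_reg() below print
 * opcodes[insn->opcode] straight into the output stream.
 */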
static int last_reg, stack_offset;

struct hardreg {
	const char *name;
	struct pseudo_list *contains;
	unsigned busy, dead, used;
};

/* Our "switch" generation is very very stupid. */
#define SWITCH_REG (1)
static void output_bb(struct basic_block *bb, unsigned long generation);

/*
 * We only know about the caller-clobbered registers
 */
static struct hardreg hardregs[] = {
	/* ... */
};
#define REGNO (sizeof(hardregs)/sizeof(struct hardreg))
struct bb_state {
	struct position pos;
	struct storage_hash_list *inputs;
	struct storage_hash_list *outputs;
	struct storage_hash_list *internal;
	pseudo_t cc_target;
	int cc_opcode, cc_dead;
};
static struct storage_hash *find_storage_hash(pseudo_t pseudo, struct storage_hash_list *list)
{
	struct storage_hash *entry;

	FOR_EACH_PTR(list, entry) {
		if (entry->pseudo == pseudo)
			return entry;
	} END_FOR_EACH_PTR(entry);
	return NULL;
}

static struct storage_hash *find_or_create_hash(pseudo_t pseudo, struct storage_hash_list **listp)
{
	struct storage_hash *entry;

	entry = find_storage_hash(pseudo, *listp);
	if (!entry) {
		entry = alloc_storage_hash(alloc_storage());
		entry->pseudo = pseudo;
		add_ptr_list(listp, entry);
	}
	return entry;
}
/* Eventually we should just build it up in memory */
static void output_line(struct bb_state *state, const char *fmt, ...)
{
	/* ... */
}

static void output_label(struct bb_state *state, const char *fmt, ...)
{
	static char buffer[512];
	va_list args;

	va_start(args, fmt);
	vsnprintf(buffer, sizeof(buffer), fmt, args);
	va_end(args);

	output_line(state, "%s:\n", buffer);
}

static void output_insn(struct bb_state *state, const char *fmt, ...)
{
	static char buffer[512];
	va_list args;

	va_start(args, fmt);
	vsnprintf(buffer, sizeof(buffer), fmt, args);
	va_end(args);

	output_line(state, "\t%s\n", buffer);
}

static void output_comment(struct bb_state *state, const char *fmt, ...)
{
	static char buffer[512];
	va_list args;

	va_start(args, fmt);
	vsnprintf(buffer, sizeof(buffer), fmt, args);
	va_end(args);

	output_line(state, "\t# %s\n", buffer);
}
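/*
 * Everything funnels through output_line(), so the emitted text has a
 * fixed shape: labels are "name:", instructions are tab-indented, and
 * comments are tab-indented lines starting with "# ".
 */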
static const char *show_memop(struct storage *storage)
{
	static char buffer[1000];

	switch (storage->type) {
	case REG_FRAME:
		sprintf(buffer, "%d(FP)", storage->offset);
		break;
	case REG_STACK:
		sprintf(buffer, "%d(SP)", storage->offset);
		break;
	case REG_REG:
		return hardregs[storage->regno].name;
	default:
		return show_storage(storage);
	}
	return buffer;
}
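/*
 * So an operand printed by show_memop() is either "<offset>(FP)" for
 * frame storage, "<offset>(SP)" for spill slots handed out by
 * alloc_stack() below, or simply the hard register name.
 */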
static void alloc_stack(struct bb_state *state, struct storage *storage)
{
	storage->type = REG_STACK;
	storage->offset = stack_offset;
}
/*
 * Can we re-generate the pseudo, so that we don't need to
 * flush it to memory? We can regenerate:
 *  - immediates and symbol addresses
 *  - pseudos we got as input in non-registers
 *  - pseudos we've already saved off earlier..
 */
static int can_regenerate(struct bb_state *state, pseudo_t pseudo)
{
	struct storage_hash *in;

	switch (pseudo->type) {
	case PSEUDO_VAL:
	case PSEUDO_SYM:
		return 1;
	default:
		in = find_storage_hash(pseudo, state->inputs);
		if (in && in->storage->type != REG_REG)
			return 1;
		in = find_storage_hash(pseudo, state->internal);
		if (in)
			return 1;
		return 0;
	}
}
static void flush_one_pseudo(struct bb_state *state, struct hardreg *hardreg, pseudo_t pseudo)
{
	struct storage_hash *out;
	struct storage *storage;

	if (can_regenerate(state, pseudo))
		return;

	output_comment(state, "flushing %s from %s", show_pseudo(pseudo), hardreg->name);
	out = find_storage_hash(pseudo, state->internal);
	if (!out) {
		out = find_storage_hash(pseudo, state->outputs);
		if (!out)
			out = find_or_create_hash(pseudo, &state->internal);
	}
	storage = out->storage;
	switch (storage->type) {
	case REG_REG:
		/*
		 * Aieee - the next user wants it in a register, but we
		 * need to flush it to memory in between. Which means that
		 * we need to allocate an internal one, dammit..
		 */
		out = find_or_create_hash(pseudo, &state->internal);
		storage = out->storage;
		/* Fall through */
	case REG_UDEF:
		alloc_stack(state, storage);
		/* Fall through */
	default:
		output_insn(state, "movl %s,%s", hardreg->name, show_memop(storage));
	}
}
/* Flush a hardreg out to the storage it has.. */
static void flush_reg(struct bb_state *state, struct hardreg *hardreg)
{
	pseudo_t pseudo;

	FOR_EACH_PTR(hardreg->contains, pseudo) {
		if (CURRENT_TAG(pseudo) & TAG_DEAD)
			continue;
		if (!(CURRENT_TAG(pseudo) & TAG_DIRTY))
			continue;
		flush_one_pseudo(state, hardreg, pseudo);
	} END_FOR_EACH_PTR(pseudo);
	free_ptr_list(&hardreg->contains);
}
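/*
 * Note the filtering above: a pseudo tagged TAG_DEAD is simply dropped,
 * and one that is still clean (no TAG_DIRTY) never needs a write-back;
 * only dirty, live pseudos reach flush_one_pseudo().
 */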
static struct storage_hash *find_pseudo_storage(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
{
	struct storage_hash *src;

	src = find_storage_hash(pseudo, state->internal);
	if (!src) {
		src = find_storage_hash(pseudo, state->inputs);
		if (!src) {
			src = find_storage_hash(pseudo, state->outputs);
			/* Undefined? Screw it! */
			if (!src)
				return NULL;

			/*
			 * If we found output storage, it had better be local stack
			 * that we flushed to earlier..
			 */
			if (src->storage->type != REG_STACK)
				return NULL;
		}
	}

	/*
	 * Incoming pseudo without any pre-set storage allocation?
	 * We can make up our own, and obviously prefer to get it
	 * in the register we already selected (if it hasn't been
	 * used yet).
	 */
	if (src->storage->type == REG_UDEF) {
		if (reg && !reg->used) {
			src->storage->type = REG_REG;
			src->storage->regno = reg - hardregs;
			return src;
		}
		alloc_stack(state, src->storage);
	}
	return src;
}
static void mark_reg_dead(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
{
	pseudo_t p;

	FOR_EACH_PTR(reg->contains, p) {
		if (p != pseudo)
			continue;
		if (CURRENT_TAG(p) & TAG_DEAD)
			continue;
		output_comment(state, "marking pseudo %s in reg %s dead", show_pseudo(pseudo), reg->name);
		TAG_CURRENT(p, TAG_DEAD);
		reg->dead++;
	} END_FOR_EACH_PTR(p);
}
static void add_pseudo_reg(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
{
	output_comment(state, "added pseudo %s to reg %s", show_pseudo(pseudo), reg->name);
	add_ptr_list_tag(&reg->contains, pseudo, TAG_DIRTY);
}
static struct hardreg *preferred_reg(struct bb_state *state, pseudo_t target)
{
	struct storage_hash *dst;

	dst = find_storage_hash(target, state->outputs);
	if (dst) {
		struct storage *storage = dst->storage;
		if (storage->type == REG_REG)
			return hardregs + storage->regno;
	}
	return NULL;
}
static struct hardreg *empty_reg(struct bb_state *state)
{
	int i;
	struct hardreg *reg = hardregs;

	for (i = 0; i < REGNO; i++, reg++) {
		if (!reg->contains)
			return reg;
	}
	return NULL;
}
static struct hardreg *target_reg(struct bb_state *state, pseudo_t pseudo, pseudo_t target)
{
	struct hardreg *reg;

	/* First, see if we have a preferred target register.. */
	reg = preferred_reg(state, target);
	if (reg && !reg->busy)
		goto found;

	reg = empty_reg(state);
	if (reg)
		goto found;

	/* ... */
	flush_reg(state, reg);

found:
	add_pseudo_reg(state, pseudo, reg);
	return reg;
}
static struct hardreg *find_in_reg(struct bb_state *state, pseudo_t pseudo)
{
	int i;

	for (i = 0; i < REGNO; i++) {
		struct hardreg *reg = hardregs + i;
		pseudo_t p;

		FOR_EACH_PTR(reg->contains, p) {
			if (p == pseudo) {
				output_comment(state, "found pseudo %s in reg %s", show_pseudo(pseudo), reg->name);
				return reg;
			}
		} END_FOR_EACH_PTR(p);
	}
	return NULL;
}
static void flush_cc_cache_to_reg(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
{
	int opcode = state->cc_opcode;

	state->cc_opcode = 0;
	state->cc_target = NULL;
	output_insn(state, "%s %s", opcodes[opcode], reg->name);
}
static void flush_cc_cache(struct bb_state *state)
{
	pseudo_t pseudo = state->cc_target;

	if (pseudo) {
		struct hardreg *dst;

		state->cc_target = NULL;

		if (!state->cc_dead) {
			dst = target_reg(state, pseudo, pseudo);
			flush_cc_cache_to_reg(state, pseudo, dst);
		}
	}
}
static void add_cc_cache(struct bb_state *state, int opcode, pseudo_t pseudo)
{
	assert(!state->cc_target);
	state->cc_target = pseudo;
	state->cc_opcode = opcode;
	state->cc_dead = 0;
	output_comment(state, "caching %s", opcodes[opcode]);
}
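/*
 * The "CC cache" models condition codes: generate_compare() below does
 * not materialize a 0/1 value, it just remembers the comparison opcode
 * and its target pseudo here. If the very next user is a branch or a
 * select, the cached opcode supplies the condition directly; otherwise
 * flush_cc_cache() forces the value into a register by emitting the
 * "set.." style mnemonic from opcodes[].
 */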
/* Fill a hardreg with the pseudo it has */
static struct hardreg *fill_reg(struct bb_state *state, struct hardreg *hardreg, pseudo_t pseudo)
{
	struct storage_hash *src;
	struct instruction *def;

	if (state->cc_target == pseudo) {
		flush_cc_cache_to_reg(state, pseudo, hardreg);
		return hardreg;
	}

	switch (pseudo->type) {
	case PSEUDO_VAL:
		output_insn(state, "movl $%lld,%s", pseudo->value, hardreg->name);
		break;
	case PSEUDO_SYM:
		output_insn(state, "movl $<%s>,%s", show_pseudo(pseudo), hardreg->name);
		break;
	default:
		def = pseudo->def;
		if (def && def->opcode == OP_SETVAL) {
			output_insn(state, "movl $<%s>,%s", show_pseudo(def->target), hardreg->name);
			break;
		}
		src = find_pseudo_storage(state, pseudo, hardreg);
		if (src) {
			if (src->flags & TAG_DEAD)
				mark_reg_dead(state, pseudo, hardreg);
			output_insn(state, "mov.%d %s,%s", 32, show_memop(src->storage), hardreg->name);
			break;
		}
		output_insn(state, "reload %s from %s", hardreg->name, show_pseudo(pseudo));
		break;
	}
	return hardreg;
}
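/*
 * So a register gets filled in one of three ways: an immediate becomes
 * "movl $<value>,<reg>", a symbol address (or an OP_SETVAL result)
 * becomes "movl $<sym>,<reg>", and anything else is reloaded from
 * whatever storage find_pseudo_storage() reports for it.
 */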
static struct hardreg *getreg(struct bb_state *state, pseudo_t pseudo, pseudo_t target)
{
	struct hardreg *reg;

	reg = find_in_reg(state, pseudo);
	if (reg)
		return reg;
	reg = target_reg(state, pseudo, target);
	return fill_reg(state, reg, pseudo);
}
static void move_reg(struct bb_state *state, struct hardreg *src, struct hardreg *dst)
{
	output_insn(state, "movl %s,%s", src->name, dst->name);
}
static struct hardreg *copy_reg(struct bb_state *state, struct hardreg *src, pseudo_t target)
{
	int i;
	struct hardreg *reg;

	reg = preferred_reg(state, target);
	if (reg && !reg->busy) {
		output_comment(state, "copying %s to preferred target %s", show_pseudo(target), reg->name);
		move_reg(state, src, reg);
		return reg;
	}

	for (i = 0; i < REGNO; i++) {
		struct hardreg *reg = hardregs + i;
		if (!reg->busy && !reg->contains) {
			output_comment(state, "copying %s to %s", show_pseudo(target), reg->name);
			output_insn(state, "movl %s,%s", src->name, reg->name);
			return reg;
		}
	}

	flush_reg(state, src);
	return src;
}
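/*
 * copy_reg() thus prefers, in order: the register the output storage
 * already wants, then any completely free register, and only as a last
 * resort does it flush the source register and reuse it in place.
 */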
static const char *generic(struct bb_state *state, pseudo_t pseudo)
{
	struct hardreg *reg;
	struct storage_hash *src;

	switch (pseudo->type) {
	case PSEUDO_VAL:
	case PSEUDO_SYM:
		return show_pseudo(pseudo);
	default:
		reg = find_in_reg(state, pseudo);
		if (reg)
			return reg->name;
		src = find_pseudo_storage(state, pseudo, NULL);
		return show_memop(src->storage);
	}
}
static const char *address(struct bb_state *state, struct instruction *memop)
{
	struct symbol *sym;
	struct hardreg *base;
	static char buffer[100];
	pseudo_t addr = memop->src;

	switch (addr->type) {
	case PSEUDO_SYM:
		sym = addr->sym;
		if (sym->ctype.modifiers & MOD_NONLOCAL) {
			sprintf(buffer, "%s+%d", show_ident(sym->ident), memop->offset);
			return buffer;
		}
		sprintf(buffer, "%d+%s(SP)", memop->offset, show_ident(sym->ident));
		return buffer;
	default:
		base = getreg(state, addr, NULL);
		sprintf(buffer, "%d(%s)", memop->offset, base->name);
		return buffer;
	}
}
static const char *reg_or_imm(struct bb_state *state, pseudo_t pseudo)
{
	switch (pseudo->type) {
	case PSEUDO_VAL:
		return show_pseudo(pseudo);
	default:
		return getreg(state, pseudo, NULL)->name;
	}
}
static void kill_dead_reg(struct hardreg *reg)
{
	pseudo_t p;

	FOR_EACH_PTR(reg->contains, p) {
		if (CURRENT_TAG(p) & TAG_DEAD) {
			DELETE_CURRENT_PTR(p);
			reg->dead--;
		}
	} END_FOR_EACH_PTR(p);
	PACK_PTR_LIST(&reg->contains);
}
static struct hardreg *target_copy_reg(struct bb_state *state, struct hardreg *src, pseudo_t target)
{
	return copy_reg(state, src, target);
}
static void do_binop(struct bb_state *state, struct instruction *insn, pseudo_t val1, pseudo_t val2)
{
	const char *op = opcodes[insn->opcode];
	struct hardreg *src = getreg(state, val1, insn->target);
	const char *src2 = generic(state, val2);
	struct hardreg *dst;

	dst = target_copy_reg(state, src, insn->target);
	output_insn(state, "%s.%d %s,%s", op, insn->size, src2, dst->name);
	add_pseudo_reg(state, insn->target, dst);
}
static void generate_binop(struct bb_state *state, struct instruction *insn)
{
	flush_cc_cache(state);
	do_binop(state, insn, insn->src1, insn->src2);
}
static int is_dead_reg(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
{
	pseudo_t p;

	FOR_EACH_PTR(reg->contains, p) {
		if (p == pseudo)
			return CURRENT_TAG(p) & TAG_DEAD;
	} END_FOR_EACH_PTR(p);
	return 0;
}
/*
 * Commutative binops are much more flexible, since we can switch the
 * sources around to satisfy the target register, or to avoid having
 * to load one of them into a register..
 */
static void generate_commutative_binop(struct bb_state *state, struct instruction *insn)
{
	pseudo_t src1, src2;
	struct hardreg *reg1, *reg2;

	flush_cc_cache(state);
	src1 = insn->src1;
	src2 = insn->src2;
	reg2 = find_in_reg(state, src2);
	if (!reg2)
		goto dont_switch;
	reg1 = find_in_reg(state, src1);
	if (!reg1)
		goto do_switch;
	if (!is_dead_reg(state, src2, reg2))
		goto dont_switch;
	if (!is_dead_reg(state, src1, reg1))
		goto do_switch;

	/* Both are dead. Is one preferable? */
	if (reg2 != preferred_reg(state, insn->target))
		goto dont_switch;

do_switch:
	src1 = src2;
	src2 = insn->src1;
dont_switch:
	do_binop(state, insn, src1, src2);
}
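/*
 * Why the swap matters: for "c = a + b" where 'b' is dead after the add
 * but 'a' is still needed, making 'b' the first operand means its
 * (dead) register becomes the destination, so the add can clobber it
 * instead of forcing target_copy_reg() to find or free another register.
 */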
/*
 * This marks a pseudo dead. It still stays on the hardreg list (the hardreg
 * still has its value), but it's scheduled to be killed after the next
 * "sequence point" when we call "kill_dead_pseudos()"
 */
static void mark_pseudo_dead(struct bb_state *state, pseudo_t pseudo)
{
	int i;
	struct storage_hash *src;

	if (state->cc_target == pseudo)
		state->cc_dead = 1;
	src = find_pseudo_storage(state, pseudo, NULL);
	if (src)
		src->flags |= TAG_DEAD;
	for (i = 0; i < REGNO; i++)
		mark_reg_dead(state, pseudo, hardregs + i);
}
static void kill_dead_pseudos(struct bb_state *state)
{
	int i;

	for (i = 0; i < REGNO; i++) {
		kill_dead_reg(hardregs + i);
	}
}
/*
 * A PHI source can define a pseudo that we already
 * have in another register. We need to invalidate the
 * old register so that we don't end up with the same
 * pseudo in "two places".
 */
static void remove_pseudo_reg(struct bb_state *state, pseudo_t pseudo)
{
	int i;

	output_comment(state, "pseudo %s died", show_pseudo(pseudo));
	for (i = 0; i < REGNO; i++) {
		struct hardreg *reg = hardregs + i;
		pseudo_t p;

		FOR_EACH_PTR(reg->contains, p) {
			if (p != pseudo)
				continue;
			if (CURRENT_TAG(p) & TAG_DEAD)
				reg->dead--;
			DELETE_CURRENT_PTR(p);
			output_comment(state, "removed pseudo %s from reg %s", show_pseudo(pseudo), reg->name);
		} END_FOR_EACH_PTR(p);
		PACK_PTR_LIST(&reg->contains);
	}
}
static void generate_store(struct instruction *insn, struct bb_state *state)
{
	output_insn(state, "mov.%d %s,%s", insn->size, reg_or_imm(state, insn->target), address(state, insn));
}
static void generate_load(struct instruction *insn, struct bb_state *state)
{
	const char *input = address(state, insn);
	struct hardreg *dst;

	kill_dead_pseudos(state);
	dst = target_reg(state, insn->target, NULL);
	output_insn(state, "mov.%d %s,%s", insn->size, input, dst->name);
}
static void generate_phisource(struct instruction *insn, struct bb_state *state)
{
	struct instruction *user;

	/* Mark all the target pseudos dead first */
	FOR_EACH_PTR(insn->phi_users, user) {
		mark_pseudo_dead(state, user->target);
	} END_FOR_EACH_PTR(user);

	FOR_EACH_PTR(insn->phi_users, user) {
		struct hardreg *reg;

		reg = getreg(state, insn->phi_src, user->target);
		remove_pseudo_reg(state, user->target);
		add_pseudo_reg(state, user->target, reg);
	} END_FOR_EACH_PTR(user);
}
static void generate_cast(struct bb_state *state, struct instruction *insn)
{
	struct hardreg *src = getreg(state, insn->src, insn->target);
	struct hardreg *dst;
	unsigned int old = insn->orig_type ? insn->orig_type->bit_size : 0;
	unsigned int new = insn->size;

	/*
	 * Cast to smaller type? Ignore the high bits, we
	 * just keep both pseudos in the same register.
	 */
	if (old >= new) {
		add_pseudo_reg(state, insn->target, src);
		return;
	}

	dst = target_copy_reg(state, src, insn->target);
	if (insn->orig_type && (insn->orig_type->ctype.modifiers & MOD_SIGNED)) {
		output_insn(state, "sext.%d.%d %s", old, new, dst->name);
	} else {
		unsigned long long mask;
		mask = ~(~0ULL << old);
		mask &= ~(~0ULL << new);
		output_insn(state, "andl.%d $%#llx,%s", insn->size, mask, dst->name);
	}
	add_pseudo_reg(state, insn->target, dst);
}
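/*
 * For a zero-extending cast the mask combines the two widths, e.g.
 * old=8, new=32 gives ~(~0ULL << 8) & ~(~0ULL << 32) = 0xff, so the
 * emitted instruction is "andl.32 $0xff,<reg>".
 */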
static void generate_output_storage(struct bb_state *state);

static const char *conditional[] = {
	/* ... */
};
static void generate_branch(struct bb_state *state, struct instruction *br)
{
	const char *cond = "XXX";
	struct basic_block *target;

	if (br->cond) {
		if (state->cc_target == br->cond) {
			cond = conditional[state->cc_opcode];
		} else {
			struct hardreg *reg = getreg(state, br->cond, NULL);
			output_insn(state, "testl %s,%s", reg->name, reg->name);
			cond = "ne";
		}
	}
	generate_output_storage(state);
	target = br->bb_true;
	if (br->cond) {
		output_insn(state, "j%s .L%p", cond, target);
		target = br->bb_false;
	}
	output_insn(state, "jmp .L%p", target);
}
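/*
 * So a conditional branch whose condition is still sitting in the CC
 * cache costs a single "j<cc>"; only when the condition has already
 * been materialized into a register do we need the extra
 * "testl <reg>,<reg>" before the jump.
 */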
/* We've made sure that there is a dummy reg live for the output */
static void generate_switch(struct bb_state *state, struct instruction *insn)
{
	struct hardreg *reg = hardregs + SWITCH_REG;

	generate_output_storage(state);
	output_insn(state, "switch on %s", reg->name);
	output_insn(state, "unimplemented: %s", show_instruction(insn));
}
static void generate_ret(struct bb_state *state, struct instruction *ret)
{
	if (ret->src && ret->src != VOID) {
		struct hardreg *wants = hardregs+0;
		struct hardreg *reg = getreg(state, ret->src, NULL);
		if (reg != wants)
			output_insn(state, "movl %s,%s", reg->name, wants->name);
	}
	output_insn(state, "ret");
}
/*
 * Fake "call" linearization just as a taster..
 */
static void generate_call(struct bb_state *state, struct instruction *insn)
{
	int offset = 0;
	pseudo_t arg;

	FOR_EACH_PTR(insn->arguments, arg) {
		output_insn(state, "pushl %s", generic(state, arg));
		offset += 4;
	} END_FOR_EACH_PTR(arg);
	flush_reg(state, hardregs+0);
	flush_reg(state, hardregs+1);
	flush_reg(state, hardregs+2);
	output_insn(state, "call %s", show_pseudo(insn->func));
	if (offset)
		output_insn(state, "addl $%d,%%esp", offset);
	add_pseudo_reg(state, insn->target, hardregs+0);
}
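/*
 * The (made-up) calling convention shown here: every argument is pushed
 * as one 32-bit word, the first three hard registers are treated as
 * caller-clobbered around the call, the result lands in hardregs[0],
 * and the caller pops the argument area with "addl $N,%esp".
 */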
static void generate_select(struct bb_state *state, struct instruction *insn)
{
	const char *cond;
	struct hardreg *src1, *src2, *dst;

	src1 = getreg(state, insn->src2, NULL);
	dst = copy_reg(state, src1, insn->target);
	add_pseudo_reg(state, insn->target, dst);
	src2 = getreg(state, insn->src3, insn->target);

	if (state->cc_target == insn->src1) {
		cond = conditional[state->cc_opcode];
	} else {
		struct hardreg *reg = getreg(state, insn->src1, NULL);
		output_insn(state, "testl %s,%s", reg->name, reg->name);
		cond = "ne";
	}

	output_insn(state, "sel%s %s,%s", cond, src2->name, dst->name);
}
struct asm_arg {
	const struct ident *name;
	const char *value;
	pseudo_t pseudo;
	struct hardreg *reg;
};
static void replace_asm_arg(char **dst_p, struct asm_arg *arg)
{
	char *dst = *dst_p;
	int len = strlen(arg->value);

	memcpy(dst, arg->value, len);
	*dst_p = dst + len;
}
static void replace_asm_percent(const char **src_p, char **dst_p, struct asm_arg *args, int nr)
{
	const char *src = *src_p;
	int index;

	/* ... */
	replace_asm_arg(dst_p, args+index);
	/* ... */
}
static void replace_asm_named(const char **src_p, char **dst_p, struct asm_arg *args, int nr)
{
	const char *src = *src_p;
	const char *end = src;
	int i, len;

	/* ... */
	for (i = 0; i < nr; i++) {
		const struct ident *ident = args[i].name;

		if (!ident)
			continue;
		if (memcmp(src, ident->name, len))
			continue;
		replace_asm_arg(dst_p, args+i);
		return;
	}
}
static const char *replace_asm_args(const char *str, struct asm_arg *args, int nr)
{
	static char buffer[1000];
	char *p = buffer;

	/* ... */
	replace_asm_percent(&str, &p, args, nr);
	/* ... */
	replace_asm_named(&str, &p, args, nr);
	/* ... */
	return buffer;
}

#define MAX_ASM_ARG (50)
static struct asm_arg asm_arguments[MAX_ASM_ARG];
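/*
 * replace_asm_args() expands the operand references inside the asm
 * template string: a "%<digit>" reference picks the numbered asm_arg
 * entry filled in by generate_asm_outputs()/generate_asm_inputs() below,
 * while the named form (presumably the gcc-style "%[name]" syntax,
 * handled by replace_asm_named) matches the entry whose .name ident was
 * taken from the constraint.
 */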
static struct asm_arg *generate_asm_inputs(struct bb_state *state, struct asm_constraint_list *list, struct asm_arg *arg)
{
	struct asm_constraint *entry;

	FOR_EACH_PTR(list, entry) {
		const char *constraint = entry->constraint;
		pseudo_t pseudo = entry->pseudo;
		struct hardreg *reg, *orig;
		const char *string;
		int index;

		switch (*constraint) {
		case 'r':
			string = getreg(state, pseudo, NULL)->name;
			break;
		case '0' ... '9':
			index = *constraint - '0';
			reg = asm_arguments[index].reg;
			orig = find_in_reg(state, pseudo);
			if (orig)
				move_reg(state, orig, reg);
			else
				fill_reg(state, reg, pseudo);
			string = reg->name;
			break;
		default:
			string = generic(state, pseudo);
			break;
		}

		output_insn(state, "# asm input \"%s\": %s : %s", constraint, show_pseudo(pseudo), string);

		arg->name = entry->ident;
		arg->value = string;
		arg++;
	} END_FOR_EACH_PTR(entry);
	return arg;
}
static struct asm_arg *generate_asm_outputs(struct bb_state *state, struct asm_constraint_list *list, struct asm_arg *arg)
{
	struct asm_constraint *entry;

	FOR_EACH_PTR(list, entry) {
		const char *constraint = entry->constraint;
		pseudo_t pseudo = entry->pseudo;
		struct hardreg *reg;
		const char *string;

		while (*constraint == '=' || *constraint == '+')
			constraint++;

		switch (*constraint) {
		case 'r':
		default:
			reg = target_reg(state, pseudo, NULL);
			arg->pseudo = pseudo;
			arg->reg = reg;
			string = reg->name;
			break;
		}

		output_insn(state, "# asm output \"%s\": %s : %s", constraint, show_pseudo(pseudo), string);

		arg->name = entry->ident;
		arg->value = string;
		arg++;
	} END_FOR_EACH_PTR(entry);
	return arg;
}
static void generate_asm(struct bb_state *state, struct instruction *insn)
{
	const char *str = insn->string;

	if (insn->asm_rules->outputs || insn->asm_rules->inputs) {
		struct asm_arg *arg;

		arg = generate_asm_outputs(state, insn->asm_rules->outputs, asm_arguments);
		arg = generate_asm_inputs(state, insn->asm_rules->inputs, arg);
		str = replace_asm_args(str, asm_arguments, arg - asm_arguments);
	}
	output_insn(state, "%s", str);
}
static void generate_compare(struct bb_state *state, struct instruction *insn)
{
	struct hardreg *src;
	const char *src2;
	int opcode;

	flush_cc_cache(state);
	opcode = insn->opcode;

	/*
	 * We should try to switch these around if necessary,
	 * and update the opcode to match..
	 */
	src = getreg(state, insn->src1, insn->target);
	src2 = generic(state, insn->src2);

	output_insn(state, "cmp.%d %s,%s", insn->size, src2, src->name);

	add_cc_cache(state, opcode, insn->target);
}
static void generate_one_insn(struct instruction *insn, struct bb_state *state)
{
	output_comment(state, "%s", show_instruction(insn));

	switch (insn->opcode) {
	case OP_ENTRY: {
		struct symbol *sym = insn->bb->ep->name;
		const char *name = show_ident(sym->ident);
		if (sym->ctype.modifiers & MOD_STATIC)
			printf("\n\n%s:\n", name);
		else
			printf("\n\n.globl %s\n%s:\n", name, name);
		break;
	}

	/*
	 * OP_PHI doesn't actually generate any code. It has been
	 * done by the storage allocator and the OP_PHISOURCE.
	 */
	case OP_PHI:
		break;

	case OP_PHISOURCE:
		generate_phisource(insn, state);
		break;

	/*
	 * OP_SETVAL likewise doesn't actually generate any
	 * code. On use, the "def" of the pseudo will be
	 * looked up.
	 */
	case OP_SETVAL:
		break;

	case OP_STORE:
		generate_store(insn, state);
		break;

	case OP_LOAD:
		generate_load(insn, state);
		break;

	case OP_DEATHNOTE:
		mark_pseudo_dead(state, insn->target);
		return;

	case OP_ADD: case OP_MUL:
	case OP_AND: case OP_OR: case OP_XOR:
	case OP_AND_BOOL: case OP_OR_BOOL:
		generate_commutative_binop(state, insn);
		break;

	case OP_SUB: case OP_DIV: case OP_MOD:
	case OP_SHL: case OP_SHR:
		generate_binop(state, insn);
		break;

	case OP_BINCMP ... OP_BINCMP_END:
		generate_compare(state, insn);
		break;

	case OP_CAST: case OP_PTRCAST:
		generate_cast(state, insn);
		break;

	case OP_SEL:
		generate_select(state, insn);
		break;

	case OP_BR:
		generate_branch(state, insn);
		break;

	case OP_SWITCH:
		generate_switch(state, insn);
		break;

	case OP_CALL:
		generate_call(state, insn);
		break;

	case OP_RET:
		generate_ret(state, insn);
		break;

	case OP_ASM:
		generate_asm(state, insn);
		break;

	default:
		output_insn(state, "unimplemented: %s", show_instruction(insn));
		break;
	}
	kill_dead_pseudos(state);
}
#define VERY_BUSY 1000
#define REG_FIXED 2000

static void write_reg_to_storage(struct bb_state *state, struct hardreg *reg, pseudo_t pseudo, struct storage *storage)
{
	int i;
	struct hardreg *out;

	switch (storage->type) {
	case REG_REG:
		out = hardregs + storage->regno;
		if (out == reg)
			return;
		output_insn(state, "movl %s,%s", reg->name, out->name);
		return;
	case REG_UDEF:
		if (reg->busy < VERY_BUSY) {
			storage->type = REG_REG;
			storage->regno = reg - hardregs;
			reg->busy = REG_FIXED;
			return;
		}

		/* Try to find a non-busy register.. */
		for (i = 0; i < REGNO; i++) {
			out = hardregs + i;
			if (out->busy)
				continue;
			output_insn(state, "movl %s,%s", reg->name, out->name);
			storage->type = REG_REG;
			storage->regno = i;
			out->busy = REG_FIXED;
			return;
		}

		/* Fall back on stack allocation ... */
		alloc_stack(state, storage);
		/* Fall through */
	default:
		output_insn(state, "movl %s,%s", reg->name, show_memop(storage));
		return;
	}
}
static void write_val_to_storage(struct bb_state *state, pseudo_t src, struct storage *storage)
{
	struct hardreg *out;

	switch (storage->type) {
	case REG_UDEF:
		alloc_stack(state, storage);
		/* Fall through */
	default:
		output_insn(state, "movl %s,%s", show_pseudo(src), show_memop(storage));
		break;
	case REG_REG:
		out = hardregs + storage->regno;
		output_insn(state, "movl %s,%s", show_pseudo(src), out->name);
		break;
	}
}
static void fill_output(struct bb_state *state, pseudo_t pseudo, struct storage *out)
{
	int i;
	struct storage_hash *in;
	struct instruction *def;

	/* Is that pseudo a constant value? */
	switch (pseudo->type) {
	case PSEUDO_VAL:
		write_val_to_storage(state, pseudo, out);
		return;
	case PSEUDO_REG:
		def = pseudo->def;
		if (def && def->opcode == OP_SETVAL) {
			write_val_to_storage(state, pseudo, out);
			return;
		}
	default:
		break;
	}

	/* See if we have that pseudo in a register.. */
	for (i = 0; i < REGNO; i++) {
		struct hardreg *reg = hardregs + i;
		pseudo_t p;

		FOR_EACH_PTR(reg->contains, p) {
			if (p == pseudo) {
				write_reg_to_storage(state, reg, pseudo, out);
				return;
			}
		} END_FOR_EACH_PTR(p);
	}

	/* Do we have it in another storage? */
	in = find_storage_hash(pseudo, state->internal);
	if (!in) {
		in = find_storage_hash(pseudo, state->inputs);
		if (!in)
			return;
	}
	switch (out->type) {
	case REG_UDEF:
		*out = *in->storage;
		break;
	case REG_REG:
		output_insn(state, "movl %s,%s", show_memop(in->storage), hardregs[out->regno].name);
		break;
	default:
		if (out == in->storage)
			break;
		if (out->type == in->storage->type && out->regno == in->storage->regno)
			break;
		output_insn(state, "movl %s,%s", show_memop(in->storage), show_memop(out));
		break;
	}
}
static int final_pseudo_flush(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
{
	struct storage_hash *hash;
	struct storage *out;
	struct hardreg *dst;

	/*
	 * Since this pseudo is live at exit, we'd better have output
	 * storage for it..
	 */
	hash = find_storage_hash(pseudo, state->outputs);
	if (!hash)
		return 0;
	out = hash->storage;

	/* If the output is in a register, try to get it there.. */
	if (out->type == REG_REG) {
		dst = hardregs + out->regno;
		/*
		 * Two good cases: nobody is using the right register,
		 * or we've already set it aside for output..
		 */
		if (!dst->busy || dst->busy > VERY_BUSY)
			goto copy_to_dst;

		/* Aiee. Try to keep it in a register.. */
		dst = empty_reg(state);
		if (dst)
			goto copy_to_dst;
		return 0;
	}

	/* If the output is undefined, let's see if we can put it in a register.. */
	if (out->type == REG_UDEF) {
		dst = empty_reg(state);
		if (dst) {
			out->type = REG_REG;
			out->regno = dst - hardregs;
			goto copy_to_dst;
		}
		/* Uhhuh. Not so good. No empty registers right now */
		return 0;
	}

	/* If we know we need to flush it, just do so already .. */
	output_insn(state, "movl %s,%s", reg->name, show_memop(out));
	return 1;

copy_to_dst:
	if (reg == dst)
		return 1;
	output_insn(state, "movl %s,%s", reg->name, dst->name);
	add_pseudo_reg(state, pseudo, dst);
	return 1;
}
/*
 * This tries to make sure that we put all the pseudos that are
 * live on exit into the proper storage
 */
static void generate_output_storage(struct bb_state *state)
{
	struct storage_hash *entry;

	/* Go through the fixed outputs, making sure we have those regs free */
	FOR_EACH_PTR(state->outputs, entry) {
		struct storage *out = entry->storage;
		if (out->type == REG_REG) {
			struct hardreg *reg = hardregs + out->regno;
			pseudo_t p;
			int flushme = 0;

			reg->busy = REG_FIXED;
			FOR_EACH_PTR(reg->contains, p) {
				if (p == entry->pseudo) {
					flushme = -100;
					continue;
				}
				if (CURRENT_TAG(p) & TAG_DEAD)
					continue;

				/* Try to write back the pseudo to where it should go ... */
				if (final_pseudo_flush(state, p, reg)) {
					DELETE_CURRENT_PTR(p);
					continue;
				}
				flushme++;
			} END_FOR_EACH_PTR(p);
			PACK_PTR_LIST(&reg->contains);
			if (flushme > 0)
				flush_reg(state, reg);
		}
	} END_FOR_EACH_PTR(entry);

	FOR_EACH_PTR(state->outputs, entry) {
		fill_output(state, entry->pseudo, entry->storage);
	} END_FOR_EACH_PTR(entry);
}
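/*
 * Two passes, because of ordering: first every output that is pinned to
 * a specific hard register gets that register vacated (and marked
 * REG_FIXED so nothing steals it), and only then does fill_output()
 * actually move each live-out pseudo into its slot.
 */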
static void generate(struct basic_block *bb, struct bb_state *state)
{
	int i;
	struct storage_hash *entry;
	struct instruction *insn;

	for (i = 0; i < REGNO; i++) {
		free_ptr_list(&hardregs[i].contains);
		hardregs[i].busy = 0;
		hardregs[i].dead = 0;
		hardregs[i].used = 0;
	}

	FOR_EACH_PTR(state->inputs, entry) {
		struct storage *storage = entry->storage;
		const char *name = show_storage(storage);
		output_comment(state, "incoming %s in %s", show_pseudo(entry->pseudo), name);
		if (storage->type == REG_REG) {
			int regno = storage->regno;
			add_pseudo_reg(state, entry->pseudo, hardregs + regno);
			name = hardregs[regno].name;
		}
	} END_FOR_EACH_PTR(entry);

	output_label(state, ".L%p", bb);
	FOR_EACH_PTR(bb->insns, insn) {
		if (!insn->bb)
			continue;
		generate_one_insn(insn, state);
	} END_FOR_EACH_PTR(insn);

	output_comment(state, "--- in ---");
	FOR_EACH_PTR(state->inputs, entry) {
		output_comment(state, "%s <- %s", show_pseudo(entry->pseudo), show_storage(entry->storage));
	} END_FOR_EACH_PTR(entry);
	output_comment(state, "--- spill ---");
	FOR_EACH_PTR(state->internal, entry) {
		output_comment(state, "%s <-> %s", show_pseudo(entry->pseudo), show_storage(entry->storage));
	} END_FOR_EACH_PTR(entry);
	output_comment(state, "--- out ---");
	FOR_EACH_PTR(state->outputs, entry) {
		output_comment(state, "%s -> %s", show_pseudo(entry->pseudo), show_storage(entry->storage));
	} END_FOR_EACH_PTR(entry);
}
static void generate_list(struct basic_block_list *list, unsigned long generation)
{
	struct basic_block *bb;
	FOR_EACH_PTR(list, bb) {
		if (bb->generation == generation)
			continue;
		output_bb(bb, generation);
	} END_FOR_EACH_PTR(bb);
}
/*
 * Mark all the output registers of all the parents
 * as being "used" - this does not mean that we cannot
 * re-use them, but it means that we cannot ask the
 * parents to pass in another pseudo in one of those
 * registers that it already uses for another child.
 */
static void mark_used_registers(struct basic_block *bb, struct bb_state *state)
{
	struct basic_block *parent;

	FOR_EACH_PTR(bb->parents, parent) {
		struct storage_hash_list *outputs = gather_storage(parent, STOR_OUT);
		struct storage_hash *entry;

		FOR_EACH_PTR(outputs, entry) {
			struct storage *s = entry->storage;
			if (s->type == REG_REG) {
				struct hardreg *reg = hardregs + s->regno;
				reg->used = 1;
			}
		} END_FOR_EACH_PTR(entry);
	} END_FOR_EACH_PTR(parent);
}
static void output_bb(struct basic_block *bb, unsigned long generation)
{
	struct bb_state state;

	bb->generation = generation;

	/* Make sure all parents have been generated first */
	generate_list(bb->parents, generation);

	state.pos = bb->pos;
	state.inputs = gather_storage(bb, STOR_IN);
	state.outputs = gather_storage(bb, STOR_OUT);
	state.internal = NULL;
	state.cc_opcode = 0;
	state.cc_target = NULL;

	/* Mark incoming registers used */
	mark_used_registers(bb, &state);

	generate(bb, &state);

	free_ptr_list(&state.inputs);
	free_ptr_list(&state.outputs);

	/* Generate all children... */
	generate_list(bb->children, generation);
}
static void set_up_arch_entry(struct entrypoint *ep, struct instruction *entry)
{
	int i;
	pseudo_t arg;

	/*
	 * We should set up argument sources here..
	 *
	 * Things like "first three arguments in registers" etc
	 * are all for this place.
	 */
	i = 0;
	FOR_EACH_PTR(entry->arg_list, arg) {
		struct storage *in = lookup_storage(entry->bb, arg, STOR_IN);
		if (!in) {
			in = alloc_storage();
			add_storage(in, entry->bb, arg, STOR_IN);
		}
		if (i < 3) {
			in->type = REG_REG;
			in->regno = i;
		} else {
			in->type = REG_FRAME;
			in->offset = (i-3)*4;
		}
		i++;
	} END_FOR_EACH_PTR(arg);
}
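/*
 * With this layout a function taking five arguments would see arg0..arg2
 * arrive in hardregs[0..2] and arg3/arg4 at frame offsets 0 and 4
 * ("0(FP)" and "4(FP)" in show_memop() terms).
 */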
/*
 * Set up storage information for "return"
 *
 * Not strictly necessary, since the code generator will
 * certainly move the return value to the right register,
 * but it can help register allocation if the allocator
 * sees that the value is going to be returned in %eax.
 */
static void set_up_arch_exit(struct basic_block *bb, struct instruction *ret)
{
	pseudo_t pseudo = ret->src;

	if (pseudo && pseudo != VOID) {
		struct storage *out = lookup_storage(bb, pseudo, STOR_OUT);
		if (!out) {
			out = alloc_storage();
			add_storage(out, bb, pseudo, STOR_OUT);
		}
		out->type = REG_REG;
		out->regno = 0;
	}
}
/*
 * Set up dummy/silly output storage information for a switch
 * instruction. We need to make sure that a register is available
 * when we generate code for switch, so force that by creating
 * a dummy output rule.
 */
static void set_up_arch_switch(struct basic_block *bb, struct instruction *insn)
{
	pseudo_t pseudo = insn->cond;
	struct storage *out = lookup_storage(bb, pseudo, STOR_OUT);

	if (!out) {
		out = alloc_storage();
		add_storage(out, bb, pseudo, STOR_OUT);
	}
	out->type = REG_REG;
	out->regno = SWITCH_REG;
}
static void arch_set_up_storage(struct entrypoint *ep)
{
	struct basic_block *bb;

	/* Argument storage etc.. */
	set_up_arch_entry(ep, ep->entry);

	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *insn = last_instruction(bb->insns);
		if (!insn)
			continue;

		switch (insn->opcode) {
		case OP_RET:
			set_up_arch_exit(bb, insn);
			break;
		case OP_SWITCH:
			set_up_arch_switch(bb, insn);
			break;
		default:
			break;
		}
	} END_FOR_EACH_PTR(bb);
}
static void output(struct entrypoint *ep)
{
	unsigned long generation = ++bb_generation;

	/* Set up initial inter-bb storage links */
	set_up_storage(ep);

	/* Architecture-specific storage rules.. */
	arch_set_up_storage(ep);

	/* Show the results ... */
	output_bb(ep->entry->bb, generation);

	/* Clear the storage hashes for the next function.. */
	free_storage();
}
static int compile(struct symbol_list *list)
{
	struct symbol *sym;

	FOR_EACH_PTR(list, sym) {
		struct entrypoint *ep;
		expand_symbol(sym);
		ep = linearize_symbol(sym);
		if (ep)
			output(ep);
	} END_FOR_EACH_PTR(sym);
	return 0;
}
int main(int argc, char **argv)
{
	return compile(sparse(argc, argv));
}