/*
 * Example of how to write a compiler with sparse
 */

#include "expression.h"
#include "linearize.h"

static const char *opcodes[] = {
	[OP_BADOP] = "bad_op",
	[OP_ENTRY] = "<entry-point>",
	[OP_SWITCH] = "switch",
	[OP_INVOKE] = "invoke",
	[OP_COMPUTEDGOTO] = "jmp *",
	[OP_UNWIND] = "unwind",
	[OP_AND_BOOL] = "and-bool",
	[OP_OR_BOOL] = "or-bool",

	/* Binary comparison */
	[OP_SET_EQ] = "seteq",
	[OP_SET_NE] = "setne",
	[OP_SET_LE] = "setle",
	[OP_SET_GE] = "setge",
	[OP_SET_LT] = "setlt",
	[OP_SET_GT] = "setgt",
	[OP_SET_BE] = "setbe",
	[OP_SET_AE] = "setae",

	/* Special three-input */
	[OP_MALLOC] = "malloc",
	[OP_ALLOCA] = "alloca",
	[OP_GET_ELEMENT_PTR] = "getelem",
	[OP_PHISOURCE] = "phisrc",
	[OP_FPCAST] = "fpcast",
	[OP_PTRCAST] = "ptrcast",
	[OP_VANEXT] = "va_next",
	[OP_VAARG] = "va_arg",
	[OP_DEATHNOTE] = "dead",

	/* Sparse tagging (line numbers, context, whatever) */
	[OP_CONTEXT] = "context",

static int last_reg, stack_offset;

	struct pseudo_list *contains;

/* Our "switch" generation is very very stupid. */
#define SWITCH_REG (1)

static void output_bb(struct basic_block *bb, unsigned long generation);

/*
 * We only know about the caller-clobbered registers
 */
static struct hardreg hardregs[] = {

	struct storage_hash_list *inputs;
	struct storage_hash_list *outputs;
	struct storage_hash_list *internal;

	int cc_opcode, cc_dead;

	struct /* OP_MEM and OP_ADDR */ {
		struct hardreg *base;
		struct hardreg *index;

static const char *show_op(struct bb_state *state, struct operand *op)
	static char buf[4][256];

	nr = (bufnr + 1) & 3;

	return op->reg->name;

	sprintf(p, "$%lld", op->value);

	p += sprintf(p, "%d", op->offset);

	p += sprintf(p, "%s%s",
		op->offset ? "+" : "",
		show_ident(op->sym->ident));
	if (op->base || op->index) {
		p += sprintf(p, "(%s%s%s",
			op->base ? op->base->name : "",
			(op->base && op->index) ? "," : "",
			op->index ? op->index->name : "");
		p += sprintf(p, ",%d", op->scale);

static struct storage_hash *find_storage_hash(pseudo_t pseudo, struct storage_hash_list *list)
	struct storage_hash *entry;
	FOR_EACH_PTR(list, entry) {
		if (entry->pseudo == pseudo)
	} END_FOR_EACH_PTR(entry);

static struct storage_hash *find_or_create_hash(pseudo_t pseudo, struct storage_hash_list **listp)
	struct storage_hash *entry;

	entry = find_storage_hash(pseudo, *listp);
	entry = alloc_storage_hash(alloc_storage());
	entry->pseudo = pseudo;
	add_ptr_list(listp, entry);

/* Eventually we should just build it up in memory */
static void output_line(struct bb_state *state, const char *fmt, ...)

static void output_label(struct bb_state *state, const char *fmt, ...)
	static char buffer[512];

	vsnprintf(buffer, sizeof(buffer), fmt, args);
	output_line(state, "%s:\n", buffer);

static void output_insn(struct bb_state *state, const char *fmt, ...)
	static char buffer[512];

	vsnprintf(buffer, sizeof(buffer), fmt, args);
	output_line(state, "\t%s\n", buffer);

#define output_insn(state, fmt, arg...) \
	output_insn(state, fmt "\t\t# %s" , ## arg , __FUNCTION__)

static void output_comment(struct bb_state *state, const char *fmt, ...)
	static char buffer[512];

	vsnprintf(buffer, sizeof(buffer), fmt, args);
	output_line(state, "\t# %s\n", buffer);

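/*
 * The macro above wraps output_insn() so that every instruction emitted from
 * this point on carries the name of the generating function as a trailing
 * "# ..." assembler comment, which makes the produced listing easy to trace
 * back to the code generator.
 */
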
static const char *show_memop(struct storage *storage)
	static char buffer[1000];

	switch (storage->type) {
		sprintf(buffer, "%d(FP)", storage->offset);
		sprintf(buffer, "%d(SP)", storage->offset);
		return hardregs[storage->regno].name;
		return show_storage(storage);

static int alloc_stack_offset(int size)
	int ret = stack_offset;
	stack_offset = ret + size;

static void alloc_stack(struct bb_state *state, struct storage *storage)
	storage->type = REG_STACK;
	storage->offset = alloc_stack_offset(4);

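/*
 * Stack slots are handed out by simply bumping the running stack_offset:
 * alloc_stack_offset() returns the old offset and advances it by "size",
 * and alloc_stack() always asks for a 4-byte slot, so successive spills
 * land at frame offsets 0, 4, 8, ...
 */
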
/*
 * Can we re-generate the pseudo, so that we don't need to
 * flush it to memory? We can regenerate:
 *  - immediates and symbol addresses
 *  - pseudos we got as input in non-registers
 *  - pseudos we've already saved off earlier..
 */
static int can_regenerate(struct bb_state *state, pseudo_t pseudo)
	struct storage_hash *in;

	switch (pseudo->type) {
		in = find_storage_hash(pseudo, state->inputs);
		if (in && in->storage->type != REG_REG)
		in = find_storage_hash(pseudo, state->internal);

static void flush_one_pseudo(struct bb_state *state, struct hardreg *hardreg, pseudo_t pseudo)
	struct storage_hash *out;
	struct storage *storage;

	if (can_regenerate(state, pseudo))

	output_comment(state, "flushing %s from %s", show_pseudo(pseudo), hardreg->name);
	out = find_storage_hash(pseudo, state->internal);
	out = find_storage_hash(pseudo, state->outputs);
	out = find_or_create_hash(pseudo, &state->internal);

	storage = out->storage;
	switch (storage->type) {
		/*
		 * Aieee - the next user wants it in a register, but we
		 * need to flush it to memory in between. Which means that
		 * we need to allocate an internal one, dammit..
		 */
		out = find_or_create_hash(pseudo, &state->internal);
		storage = out->storage;

		alloc_stack(state, storage);

	output_insn(state, "movl %s,%s", hardreg->name, show_memop(storage));

/* Flush a hardreg out to the storage it has.. */
static void flush_reg(struct bb_state *state, struct hardreg *reg)
	output_comment(state, "reg %s flushed while busy is %d!", reg->name, reg->busy);

	FOR_EACH_PTR(reg->contains, pseudo) {
		if (CURRENT_TAG(pseudo) & TAG_DEAD)
		if (!(CURRENT_TAG(pseudo) & TAG_DIRTY))
		flush_one_pseudo(state, reg, pseudo);
	} END_FOR_EACH_PTR(pseudo);
	free_ptr_list(&reg->contains);

static struct storage_hash *find_pseudo_storage(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
	struct storage_hash *src;

	src = find_storage_hash(pseudo, state->internal);
	src = find_storage_hash(pseudo, state->inputs);
	src = find_storage_hash(pseudo, state->outputs);
	/* Undefined? Screw it! */

	/*
	 * If we found output storage, it had better be local stack
	 * that we flushed to earlier..
	 */
	if (src->storage->type != REG_STACK)

	/*
	 * Incoming pseudo without any pre-set storage allocation?
	 * We can make up our own, and obviously prefer to get it
	 * in the register we already selected (if it hasn't been
	 */
	if (src->storage->type == REG_UDEF) {
		if (reg && !reg->used) {
			src->storage->type = REG_REG;
			src->storage->regno = reg - hardregs;
		alloc_stack(state, src->storage);

static void mark_reg_dead(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
	FOR_EACH_PTR(reg->contains, p) {
		if (CURRENT_TAG(p) & TAG_DEAD)
		output_comment(state, "marking pseudo %s in reg %s dead", show_pseudo(pseudo), reg->name);
		TAG_CURRENT(p, TAG_DEAD);
	} END_FOR_EACH_PTR(p);

static void add_pseudo_reg(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
	output_comment(state, "added pseudo %s to reg %s", show_pseudo(pseudo), reg->name);
	add_ptr_list_tag(&reg->contains, pseudo, TAG_DIRTY);

static struct hardreg *preferred_reg(struct bb_state *state, pseudo_t target)
	struct storage_hash *dst;

	dst = find_storage_hash(target, state->outputs);
		struct storage *storage = dst->storage;
		if (storage->type == REG_REG)
			return hardregs + storage->regno;

static struct hardreg *empty_reg(struct bb_state *state)
	struct hardreg *reg = hardregs;

	for (i = 0; i < REGNO; i++, reg++) {

static struct hardreg *target_reg(struct bb_state *state, pseudo_t pseudo, pseudo_t target)
	int unable_to_find_reg = 0;

	/* First, see if we have a preferred target register.. */
	reg = preferred_reg(state, target);
	if (reg && !reg->contains)

	reg = empty_reg(state);

		flush_reg(state, reg);
	} while (i != last_reg);
	assert(unable_to_find_reg);

	add_pseudo_reg(state, pseudo, reg);

static struct hardreg *find_in_reg(struct bb_state *state, pseudo_t pseudo)
	for (i = 0; i < REGNO; i++) {
		FOR_EACH_PTR(reg->contains, p) {
			output_comment(state, "found pseudo %s in reg %s (busy=%d)", show_pseudo(pseudo), reg->name, reg->busy);
		} END_FOR_EACH_PTR(p);

static void flush_pseudo(struct bb_state *state, pseudo_t pseudo, struct storage *storage)
	struct hardreg *reg = find_in_reg(state, pseudo);

		flush_reg(state, reg);

static void flush_cc_cache_to_reg(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
	int opcode = state->cc_opcode;

	state->cc_opcode = 0;
	state->cc_target = NULL;
	output_insn(state, "%s %s", opcodes[opcode], reg->name);

static void flush_cc_cache(struct bb_state *state)
	pseudo_t pseudo = state->cc_target;

		state->cc_target = NULL;

		if (!state->cc_dead) {
			dst = target_reg(state, pseudo, pseudo);
			flush_cc_cache_to_reg(state, pseudo, dst);

static void add_cc_cache(struct bb_state *state, int opcode, pseudo_t pseudo)
	assert(!state->cc_target);
	state->cc_target = pseudo;
	state->cc_opcode = opcode;
	output_comment(state, "caching %s", opcodes[opcode]);

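/*
 * The cc_* fields form a small condition-code cache: generate_compare() only
 * records the comparison opcode and its target pseudo via add_cc_cache(), and
 * the "setcc"-style instruction is materialized by flush_cc_cache_to_reg()
 * only when something actually needs the value in a register. Branches and
 * selects can consume the cached flags directly instead.
 */
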
/* Fill a hardreg with the pseudo it has */
static struct hardreg *fill_reg(struct bb_state *state, struct hardreg *hardreg, pseudo_t pseudo)
	struct storage_hash *src;
	struct instruction *def;

	if (state->cc_target == pseudo) {
		flush_cc_cache_to_reg(state, pseudo, hardreg);

	switch (pseudo->type) {
		output_insn(state, "movl $%lld,%s", pseudo->value, hardreg->name);

		src = find_pseudo_storage(state, pseudo, NULL);
			output_insn(state, "movl $<%s>,%s", show_pseudo(pseudo), hardreg->name);
		switch (src->storage->type) {
			/* Aiaiaiaiaii! Need to flush it to temporary memory */
			src = find_or_create_hash(pseudo, &state->internal);
			alloc_stack(state, src->storage);
			flush_pseudo(state, pseudo, src->storage);
			output_insn(state, "leal %s,%s", show_memop(src->storage), hardreg->name);

		if (def && def->opcode == OP_SETVAL) {
			output_insn(state, "movl $<%s>,%s", show_pseudo(def->target), hardreg->name);

		src = find_pseudo_storage(state, pseudo, hardreg);
		if (src->flags & TAG_DEAD)
			mark_reg_dead(state, pseudo, hardreg);
		output_insn(state, "mov.%d %s,%s", 32, show_memop(src->storage), hardreg->name);

		output_insn(state, "reload %s from %s", hardreg->name, show_pseudo(pseudo));

static struct hardreg *getreg(struct bb_state *state, pseudo_t pseudo, pseudo_t target)
	reg = find_in_reg(state, pseudo);
	reg = target_reg(state, pseudo, target);
	return fill_reg(state, reg, pseudo);

static void move_reg(struct bb_state *state, struct hardreg *src, struct hardreg *dst)
	output_insn(state, "movl %s,%s", src->name, dst->name);

static struct hardreg *copy_reg(struct bb_state *state, struct hardreg *src, pseudo_t target)
	/* If the container has been killed off, just re-use it */

	/* If "src" only has one user, and the contents are dead, we can re-use it */
	if (src->busy == 1 && src->dead == 1)

	reg = preferred_reg(state, target);
	if (reg && !reg->contains) {
		output_comment(state, "copying %s to preferred target %s", show_pseudo(target), reg->name);
		move_reg(state, src, reg);

	for (i = 0; i < REGNO; i++) {
		struct hardreg *reg = hardregs + i;
		if (!reg->contains) {
			output_comment(state, "copying %s to %s", show_pseudo(target), reg->name);
			output_insn(state, "movl %s,%s", src->name, reg->name);

	flush_reg(state, src);

static void put_operand(struct bb_state *state, struct operand *op)

static struct operand *alloc_op(void)
	struct operand *op = malloc(sizeof(*op));
	memset(op, 0, sizeof(*op));

static struct operand *get_register_operand(struct bb_state *state, pseudo_t pseudo, pseudo_t target)
	struct operand *op = alloc_op();

	op->reg = getreg(state, pseudo, target);

static int get_sym_frame_offset(struct bb_state *state, pseudo_t pseudo)
	int offset = pseudo->nr;

		offset = alloc_stack_offset(4);

static struct operand *get_generic_operand(struct bb_state *state, pseudo_t pseudo)
	struct storage_hash *hash;
	struct operand *op = malloc(sizeof(*op));

	memset(op, 0, sizeof(*op));
	switch (pseudo->type) {
		op->value = pseudo->value;

		struct symbol *sym = pseudo->sym;

		if (sym->ctype.modifiers & MOD_NONLOCAL) {
		op->base = hardregs + REG_EBP;
		op->offset = get_sym_frame_offset(state, pseudo);

		reg = find_in_reg(state, pseudo);
		hash = find_pseudo_storage(state, pseudo, NULL);
			op->reg = hardregs + src->regno;

			op->offset = src->offset;
			op->base = hardregs + REG_EBP;

			op->offset = src->offset;
			op->base = hardregs + REG_ESP;

/* Callers should be made to use the proper "operand" formats */
static const char *generic(struct bb_state *state, pseudo_t pseudo)
	struct operand *op = get_generic_operand(state, pseudo);
	static char buf[100];

	if (!op->offset && op->base && !op->sym)
		return op->base->name;
	if (op->sym && !op->base) {
		int len = sprintf(buf, "$ %s", show_op(state, op));
			sprintf(buf + len, " + %d", op->offset);

	str = show_op(state, op);
	put_operand(state, op);
	reg = target_reg(state, pseudo, NULL);
	output_insn(state, "lea %s,%s", show_op(state, op), reg->name);

	str = show_op(state, op);

	put_operand(state, op);

static struct operand *get_address_operand(struct bb_state *state, struct instruction *memop)
	struct hardreg *base;
	struct operand *op = get_generic_operand(state, memop->src);

		op->offset += memop->offset;

		put_operand(state, op);
		base = getreg(state, memop->src, NULL);
		op->offset = memop->offset;

static const char *address(struct bb_state *state, struct instruction *memop)
	struct operand *op = get_address_operand(state, memop);
	const char *str = show_op(state, op);
	put_operand(state, op);

static const char *reg_or_imm(struct bb_state *state, pseudo_t pseudo)
	switch (pseudo->type) {
		return show_pseudo(pseudo);
		return getreg(state, pseudo, NULL)->name;

static void kill_dead_reg(struct hardreg *reg)
	FOR_EACH_PTR(reg->contains, p) {
		if (CURRENT_TAG(p) & TAG_DEAD) {
			DELETE_CURRENT_PTR(p);
	} END_FOR_EACH_PTR(p);
	PACK_PTR_LIST(&reg->contains);

static struct hardreg *target_copy_reg(struct bb_state *state, struct hardreg *src, pseudo_t target)
	return copy_reg(state, src, target);

static void do_binop(struct bb_state *state, struct instruction *insn, pseudo_t val1, pseudo_t val2)
	const char *op = opcodes[insn->opcode];
	struct operand *src = get_register_operand(state, val1, insn->target);
	struct operand *src2 = get_generic_operand(state, val2);

	dst = target_copy_reg(state, src->reg, insn->target);
	output_insn(state, "%s.%d %s,%s", op, insn->size, show_op(state, src2), dst->name);
	put_operand(state, src);
	put_operand(state, src2);
	add_pseudo_reg(state, insn->target, dst);

static void generate_binop(struct bb_state *state, struct instruction *insn)
	flush_cc_cache(state);
	do_binop(state, insn, insn->src1, insn->src2);

static int is_dead_reg(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
	FOR_EACH_PTR(reg->contains, p) {
			return CURRENT_TAG(p) & TAG_DEAD;
	} END_FOR_EACH_PTR(p);

/*
 * Commutative binops are much more flexible, since we can switch the
 * sources around to satisfy the target register, or to avoid having
 * to load one of them into a register..
 */
static void generate_commutative_binop(struct bb_state *state, struct instruction *insn)
	struct hardreg *reg1, *reg2;

	flush_cc_cache(state);

	reg2 = find_in_reg(state, src2);
	reg1 = find_in_reg(state, src1);
	if (!is_dead_reg(state, src2, reg2))
	if (!is_dead_reg(state, src1, reg1))

	/* Both are dead. Is one preferable? */
	if (reg2 != preferred_reg(state, insn->target))

	do_binop(state, insn, src1, src2);

/*
 * This marks a pseudo dead. It still stays on the hardreg list (the hardreg
 * still has its value), but it's scheduled to be killed after the next
 * "sequence point" when we call "kill_dead_pseudos()"
 */
static void mark_pseudo_dead(struct bb_state *state, pseudo_t pseudo)
	struct storage_hash *src;

	if (state->cc_target == pseudo)
	src = find_pseudo_storage(state, pseudo, NULL);
		src->flags |= TAG_DEAD;
	for (i = 0; i < REGNO; i++)
		mark_reg_dead(state, pseudo, hardregs + i);

static void kill_dead_pseudos(struct bb_state *state)
	for (i = 0; i < REGNO; i++) {
		kill_dead_reg(hardregs + i);

static void generate_store(struct instruction *insn, struct bb_state *state)
	output_insn(state, "mov.%d %s,%s", insn->size, reg_or_imm(state, insn->target), address(state, insn));

static void generate_load(struct instruction *insn, struct bb_state *state)
	const char *input = address(state, insn);

	kill_dead_pseudos(state);
	dst = target_reg(state, insn->target, NULL);
	output_insn(state, "mov.%d %s,%s", insn->size, input, dst->name);

static void kill_pseudo(struct bb_state *state, pseudo_t pseudo)
	struct hardreg *reg;

	output_comment(state, "killing pseudo %s", show_pseudo(pseudo));
	for (i = 0; i < REGNO; i++) {
		FOR_EACH_PTR(reg->contains, p) {
			if (CURRENT_TAG(p) & TAG_DEAD)
			output_comment(state, "removing pseudo %s from reg %s",
				show_pseudo(pseudo), reg->name);
			DELETE_CURRENT_PTR(p);
		} END_FOR_EACH_PTR(p);
		PACK_PTR_LIST(&reg->contains);

static void generate_copy(struct bb_state *state, struct instruction *insn)
	struct hardreg *src = getreg(state, insn->src, insn->target);

	kill_pseudo(state, insn->target);
	add_pseudo_reg(state, insn->target, src);

static void generate_cast(struct bb_state *state, struct instruction *insn)
	struct hardreg *src = getreg(state, insn->src, insn->target);
	struct hardreg *dst;
	unsigned int old = insn->orig_type ? insn->orig_type->bit_size : 0;
	unsigned int new = insn->size;

	/*
	 * Cast to smaller type? Ignore the high bits, we
	 * just keep both pseudos in the same register.
	 */
		add_pseudo_reg(state, insn->target, src);

	dst = target_copy_reg(state, src, insn->target);
	if (insn->orig_type && (insn->orig_type->ctype.modifiers & MOD_SIGNED)) {
		output_insn(state, "sext.%d.%d %s", old, new, dst->name);
		unsigned long long mask;
		mask = ~(~0ULL << old);
		mask &= ~(~0ULL << new);
		output_insn(state, "andl.%d $%#llx,%s", insn->size, mask, dst->name);
	add_pseudo_reg(state, insn->target, dst);

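/*
 * Worked example for the unsigned-extension path above: casting from an
 * 8-bit to a 32-bit type gives old = 8 and new = 32, so
 *	mask  = ~(~0ULL << 8)	= 0xff
 *	mask &= ~(~0ULL << 32)	= 0xff
 * and the emitted "andl" clears everything above the low 8 bits.
 */
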
static void generate_output_storage(struct bb_state *state);

static const char *conditional[] = {

static void generate_branch(struct bb_state *state, struct instruction *br)
	const char *cond = "XXX";
	struct basic_block *target;

	if (state->cc_target == br->cond) {
		cond = conditional[state->cc_opcode];
		struct hardreg *reg = getreg(state, br->cond, NULL);
		output_insn(state, "testl %s,%s", reg->name, reg->name);

	generate_output_storage(state);
	target = br->bb_true;
		output_insn(state, "j%s .L%p", cond, target);
	target = br->bb_false;
		output_insn(state, "jmp .L%p", target);

/* We've made sure that there is a dummy reg live for the output */
static void generate_switch(struct bb_state *state, struct instruction *insn)
	struct hardreg *reg = hardregs + SWITCH_REG;

	generate_output_storage(state);
	output_insn(state, "switch on %s", reg->name);
	output_insn(state, "unimplemented: %s", show_instruction(insn));

static void generate_ret(struct bb_state *state, struct instruction *ret)
	if (ret->src && ret->src != VOID) {
		struct hardreg *wants = hardregs+0;
		struct hardreg *reg = getreg(state, ret->src, NULL);
			output_insn(state, "movl %s,%s", reg->name, wants->name);
	output_insn(state, "ret");

/*
 * Fake "call" linearization just as a taster..
 */
static void generate_call(struct bb_state *state, struct instruction *insn)
	FOR_EACH_PTR(insn->arguments, arg) {
		output_insn(state, "pushl %s", generic(state, arg));
	} END_FOR_EACH_PTR(arg);
	flush_reg(state, hardregs+0);
	flush_reg(state, hardregs+1);
	flush_reg(state, hardregs+2);
	output_insn(state, "call %s", show_pseudo(insn->func));
		output_insn(state, "addl $%d,%%esp", offset);
	if (insn->target && insn->target != VOID)
		add_pseudo_reg(state, insn->target, hardregs+0);

static void generate_select(struct bb_state *state, struct instruction *insn)
	struct hardreg *src1, *src2, *dst;

	src1 = getreg(state, insn->src2, NULL);
	dst = copy_reg(state, src1, insn->target);
	add_pseudo_reg(state, insn->target, dst);
	src2 = getreg(state, insn->src3, insn->target);

	if (state->cc_target == insn->src1) {
		cond = conditional[state->cc_opcode];
		struct hardreg *reg = getreg(state, insn->src1, NULL);
		output_insn(state, "testl %s,%s", reg->name, reg->name);

	output_insn(state, "sel%s %s,%s", cond, src2->name, dst->name);

	const struct ident *name;
	struct hardreg *reg;

static void replace_asm_arg(char **dst_p, struct asm_arg *arg)
	int len = strlen(arg->value);

	memcpy(dst, arg->value, len);

static void replace_asm_percent(const char **src_p, char **dst_p, struct asm_arg *args, int nr)
	const char *src = *src_p;

		replace_asm_arg(dst_p, args+index);

static void replace_asm_named(const char **src_p, char **dst_p, struct asm_arg *args, int nr)
	const char *src = *src_p;
	const char *end = src;

	for (i = 0; i < nr; i++) {
		const struct ident *ident = args[i].name;
		if (memcmp(src, ident->name, len))
		replace_asm_arg(dst_p, args+i);

static const char *replace_asm_args(const char *str, struct asm_arg *args, int nr)
	static char buffer[1000];

		replace_asm_percent(&str, &p, args, nr);
		replace_asm_named(&str, &p, args, nr);

#define MAX_ASM_ARG (50)
static struct asm_arg asm_arguments[MAX_ASM_ARG];

static struct asm_arg *generate_asm_inputs(struct bb_state *state, struct asm_constraint_list *list, struct asm_arg *arg)
	struct asm_constraint *entry;

	FOR_EACH_PTR(list, entry) {
		const char *constraint = entry->constraint;
		pseudo_t pseudo = entry->pseudo;
		struct hardreg *reg, *orig;

		switch (*constraint) {
			string = getreg(state, pseudo, NULL)->name;

			index = *constraint - '0';
			reg = asm_arguments[index].reg;
			orig = find_in_reg(state, pseudo);
				move_reg(state, orig, reg);
				fill_reg(state, reg, pseudo);

			string = generic(state, pseudo);

		output_insn(state, "# asm input \"%s\": %s : %s", constraint, show_pseudo(pseudo), string);

		arg->name = entry->ident;
		arg->value = string;
	} END_FOR_EACH_PTR(entry);

static struct asm_arg *generate_asm_outputs(struct bb_state *state, struct asm_constraint_list *list, struct asm_arg *arg)
	struct asm_constraint *entry;

	FOR_EACH_PTR(list, entry) {
		const char *constraint = entry->constraint;
		pseudo_t pseudo = entry->pseudo;
		struct hardreg *reg;

		while (*constraint == '=' || *constraint == '+')

		switch (*constraint) {
			reg = target_reg(state, pseudo, NULL);
			arg->pseudo = pseudo;

		output_insn(state, "# asm output \"%s\": %s : %s", constraint, show_pseudo(pseudo), string);

		arg->name = entry->ident;
		arg->value = string;
	} END_FOR_EACH_PTR(entry);

static void generate_asm(struct bb_state *state, struct instruction *insn)
	const char *str = insn->string;

	if (insn->asm_rules->outputs || insn->asm_rules->inputs) {
		struct asm_arg *arg;

		arg = generate_asm_outputs(state, insn->asm_rules->outputs, asm_arguments);
		arg = generate_asm_inputs(state, insn->asm_rules->inputs, arg);
		str = replace_asm_args(str, asm_arguments, arg - asm_arguments);
	output_insn(state, "%s", str);

static void generate_compare(struct bb_state *state, struct instruction *insn)
	struct hardreg *src;

	flush_cc_cache(state);
	opcode = insn->opcode;

	/*
	 * We should try to switch these around if necessary,
	 * and update the opcode to match..
	 */
	src = getreg(state, insn->src1, insn->target);
	src2 = generic(state, insn->src2);

	output_insn(state, "cmp.%d %s,%s", insn->size, src2, src->name);

	add_cc_cache(state, opcode, insn->target);

static void generate_one_insn(struct instruction *insn, struct bb_state *state)
	output_comment(state, "%s", show_instruction(insn));

	switch (insn->opcode) {
		struct symbol *sym = insn->bb->ep->name;
		const char *name = show_ident(sym->ident);

		if (sym->ctype.modifiers & MOD_STATIC)
			printf("\n\n%s:\n", name);
			printf("\n\n.globl %s\n%s:\n", name, name);

		/*
		 * OP_SETVAL likewise doesn't actually generate any
		 * code. On use, the "def" of the pseudo will be
		 */

		generate_store(insn, state);
		generate_load(insn, state);
		mark_pseudo_dead(state, insn->target);
		generate_copy(state, insn);

	case OP_ADD: case OP_MULU: case OP_MULS:
	case OP_AND: case OP_OR: case OP_XOR:
	case OP_AND_BOOL: case OP_OR_BOOL:
		generate_commutative_binop(state, insn);

	case OP_SUB: case OP_DIVU: case OP_DIVS:
	case OP_MODU: case OP_MODS:
	case OP_SHL: case OP_LSR: case OP_ASR:
		generate_binop(state, insn);

	case OP_BINCMP ... OP_BINCMP_END:
		generate_compare(state, insn);

	case OP_CAST: case OP_SCAST: case OP_FPCAST: case OP_PTRCAST:
		generate_cast(state, insn);

		generate_select(state, insn);
		generate_branch(state, insn);
		generate_switch(state, insn);
		generate_call(state, insn);
		generate_ret(state, insn);
		generate_asm(state, insn);

		output_insn(state, "unimplemented: %s", show_instruction(insn));

	kill_dead_pseudos(state);

#define VERY_BUSY 1000
#define REG_FIXED 2000

static void write_reg_to_storage(struct bb_state *state, struct hardreg *reg, pseudo_t pseudo, struct storage *storage)
	struct hardreg *out;

	switch (storage->type) {
		out = hardregs + storage->regno;
		output_insn(state, "movl %s,%s", reg->name, out->name);

		if (reg->busy < VERY_BUSY) {
			storage->type = REG_REG;
			storage->regno = reg - hardregs;
			reg->busy = REG_FIXED;

		/* Try to find a non-busy register.. */
		for (i = 0; i < REGNO; i++) {
			output_insn(state, "movl %s,%s", reg->name, out->name);
			storage->type = REG_REG;
			out->busy = REG_FIXED;

		/* Fall back on stack allocation ... */
		alloc_stack(state, storage);
		output_insn(state, "movl %s,%s", reg->name, show_memop(storage));

static void write_val_to_storage(struct bb_state *state, pseudo_t src, struct storage *storage)
	struct hardreg *out;

	switch (storage->type) {
		alloc_stack(state, storage);
		output_insn(state, "movl %s,%s", show_pseudo(src), show_memop(storage));

		out = hardregs + storage->regno;
		output_insn(state, "movl %s,%s", show_pseudo(src), out->name);

static void fill_output(struct bb_state *state, pseudo_t pseudo, struct storage *out)
	struct storage_hash *in;
	struct instruction *def;

	/* Is that pseudo a constant value? */
	switch (pseudo->type) {
		write_val_to_storage(state, pseudo, out);

		if (def && def->opcode == OP_SETVAL) {
			write_val_to_storage(state, pseudo, out);

	/* See if we have that pseudo in a register.. */
	for (i = 0; i < REGNO; i++) {
		struct hardreg *reg = hardregs + i;

		FOR_EACH_PTR(reg->contains, p) {
				write_reg_to_storage(state, reg, pseudo, out);
		} END_FOR_EACH_PTR(p);

	/* Do we have it in another storage? */
	in = find_storage_hash(pseudo, state->internal);
		in = find_storage_hash(pseudo, state->inputs);

	switch (out->type) {
		*out = *in->storage;
		output_insn(state, "movl %s,%s", show_memop(in->storage), hardregs[out->regno].name);

		if (out == in->storage)
		if ((out->type == in->storage->type) && (out->regno == in->storage->regno))
		output_insn(state, "movl %s,%s", show_memop(in->storage), show_memop(out));

static int final_pseudo_flush(struct bb_state *state, pseudo_t pseudo, struct hardreg *reg)
	struct storage_hash *hash;
	struct storage *out;
	struct hardreg *dst;

	/*
	 * Since this pseudo is live at exit, we'd better have output
	 */
	hash = find_storage_hash(pseudo, state->outputs);
	out = hash->storage;

	/* If the output is in a register, try to get it there.. */
	if (out->type == REG_REG) {
		dst = hardregs + out->regno;
		/*
		 * Two good cases: nobody is using the right register,
		 * or we've already set it aside for output..
		 */
		if (!dst->contains || dst->busy > VERY_BUSY)

		/* Aiee. Try to keep it in a register.. */
		dst = empty_reg(state);

	/* If the output is undefined, let's see if we can put it in a register.. */
	if (out->type == REG_UDEF) {
		dst = empty_reg(state);
		out->type = REG_REG;
		out->regno = dst - hardregs;

		/* Uhhuh. Not so good. No empty registers right now */

	/* If we know we need to flush it, just do so already .. */
	output_insn(state, "movl %s,%s", reg->name, show_memop(out));

	output_insn(state, "movl %s,%s", reg->name, dst->name);
	add_pseudo_reg(state, pseudo, dst);

/*
 * This tries to make sure that we put all the pseudos that are
 * live on exit into the proper storage
 */
static void generate_output_storage(struct bb_state *state)
	struct storage_hash *entry;

	/* Go through the fixed outputs, making sure we have those regs free */
	FOR_EACH_PTR(state->outputs, entry) {
		struct storage *out = entry->storage;
		if (out->type == REG_REG) {
			struct hardreg *reg = hardregs + out->regno;

			reg->busy = REG_FIXED;
			FOR_EACH_PTR(reg->contains, p) {
				if (p == entry->pseudo) {
				if (CURRENT_TAG(p) & TAG_DEAD)

				/* Try to write back the pseudo to where it should go ... */
				if (final_pseudo_flush(state, p, reg)) {
					DELETE_CURRENT_PTR(p);
			} END_FOR_EACH_PTR(p);
			PACK_PTR_LIST(&reg->contains);

			flush_reg(state, reg);
	} END_FOR_EACH_PTR(entry);

	FOR_EACH_PTR(state->outputs, entry) {
		fill_output(state, entry->pseudo, entry->storage);
	} END_FOR_EACH_PTR(entry);

static void generate(struct basic_block *bb, struct bb_state *state)
	struct storage_hash *entry;
	struct instruction *insn;

	for (i = 0; i < REGNO; i++) {
		free_ptr_list(&hardregs[i].contains);
		hardregs[i].busy = 0;
		hardregs[i].dead = 0;
		hardregs[i].used = 0;

	FOR_EACH_PTR(state->inputs, entry) {
		struct storage *storage = entry->storage;
		const char *name = show_storage(storage);
		output_comment(state, "incoming %s in %s", show_pseudo(entry->pseudo), name);
		if (storage->type == REG_REG) {
			int regno = storage->regno;
			add_pseudo_reg(state, entry->pseudo, hardregs + regno);
			name = hardregs[regno].name;
	} END_FOR_EACH_PTR(entry);

	output_label(state, ".L%p", bb);
	FOR_EACH_PTR(bb->insns, insn) {
		generate_one_insn(insn, state);
	} END_FOR_EACH_PTR(insn);

	output_comment(state, "--- in ---");
	FOR_EACH_PTR(state->inputs, entry) {
		output_comment(state, "%s <- %s", show_pseudo(entry->pseudo), show_storage(entry->storage));
	} END_FOR_EACH_PTR(entry);
	output_comment(state, "--- spill ---");
	FOR_EACH_PTR(state->internal, entry) {
		output_comment(state, "%s <-> %s", show_pseudo(entry->pseudo), show_storage(entry->storage));
	} END_FOR_EACH_PTR(entry);
	output_comment(state, "--- out ---");
	FOR_EACH_PTR(state->outputs, entry) {
		output_comment(state, "%s -> %s", show_pseudo(entry->pseudo), show_storage(entry->storage));
	} END_FOR_EACH_PTR(entry);

static void generate_list(struct basic_block_list *list, unsigned long generation)
	struct basic_block *bb;
	FOR_EACH_PTR(list, bb) {
		if (bb->generation == generation)
		output_bb(bb, generation);
	} END_FOR_EACH_PTR(bb);

/*
 * Mark all the output registers of all the parents
 * as being "used" - this does not mean that we cannot
 * re-use them, but it means that we cannot ask the
 * parents to pass in another pseudo in one of those
 * registers that it already uses for another child.
 */
static void mark_used_registers(struct basic_block *bb, struct bb_state *state)
	struct basic_block *parent;

	FOR_EACH_PTR(bb->parents, parent) {
		struct storage_hash_list *outputs = gather_storage(parent, STOR_OUT);
		struct storage_hash *entry;

		FOR_EACH_PTR(outputs, entry) {
			struct storage *s = entry->storage;
			if (s->type == REG_REG) {
				struct hardreg *reg = hardregs + s->regno;
		} END_FOR_EACH_PTR(entry);
	} END_FOR_EACH_PTR(parent);

static void output_bb(struct basic_block *bb, unsigned long generation)
	struct bb_state state;

	bb->generation = generation;

	/* Make sure all parents have been generated first */
	generate_list(bb->parents, generation);

	state.pos = bb->pos;
	state.inputs = gather_storage(bb, STOR_IN);
	state.outputs = gather_storage(bb, STOR_OUT);
	state.internal = NULL;
	state.cc_opcode = 0;
	state.cc_target = NULL;

	/* Mark incoming registers used */
	mark_used_registers(bb, &state);

	generate(bb, &state);

	free_ptr_list(&state.inputs);
	free_ptr_list(&state.outputs);

	/* Generate all children... */
	generate_list(bb->children, generation);

/*
 * We should set up argument sources here..
 *
 * Things like "first three arguments in registers" etc
 * are all for this place.
 *
 * On x86, we default to stack, unless it's a static
 * function that doesn't have its address taken.
 *
 * I should implement the -mregparm=X cmd line option.
 */
static void set_up_arch_entry(struct entrypoint *ep, struct instruction *entry)
	struct symbol *sym, *argtype;
	int i, offset, regparm;

	if (!(sym->ctype.modifiers & MOD_ADDRESSABLE))
	sym = sym->ctype.base_type;

	PREPARE_PTR_LIST(sym->arguments, argtype);
	FOR_EACH_PTR(entry->arg_list, arg) {
		struct storage *in = lookup_storage(entry->bb, arg, STOR_IN);
			in = alloc_storage();
			add_storage(in, entry->bb, arg, STOR_IN);

			int bits = argtype ? argtype->bit_size : 0;

			if (bits < bits_in_int)
			in->type = REG_FRAME;
			in->offset = offset;
			offset += bits >> 3;
		NEXT_PTR_LIST(argtype);
	} END_FOR_EACH_PTR(arg);
	FINISH_PTR_LIST(argtype);

/*
 * Set up storage information for "return"
 *
 * Not strictly necessary, since the code generator will
 * certainly move the return value to the right register,
 * but it can help register allocation if the allocator
 * sees that the target register is going to return in %eax.
 */
static void set_up_arch_exit(struct basic_block *bb, struct instruction *ret)
	pseudo_t pseudo = ret->src;

	if (pseudo && pseudo != VOID) {
		struct storage *out = lookup_storage(bb, pseudo, STOR_OUT);
			out = alloc_storage();
			add_storage(out, bb, pseudo, STOR_OUT);
		out->type = REG_REG;

/*
 * Set up dummy/silly output storage information for a switch
 * instruction. We need to make sure that a register is available
 * when we generate code for switch, so force that by creating
 * a dummy output rule.
 */
static void set_up_arch_switch(struct basic_block *bb, struct instruction *insn)
	pseudo_t pseudo = insn->cond;
	struct storage *out = lookup_storage(bb, pseudo, STOR_OUT);

		out = alloc_storage();
		add_storage(out, bb, pseudo, STOR_OUT);
	out->type = REG_REG;
	out->regno = SWITCH_REG;

static void arch_set_up_storage(struct entrypoint *ep)
	struct basic_block *bb;

	/* Argument storage etc.. */
	set_up_arch_entry(ep, ep->entry);

	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *insn = last_instruction(bb->insns);

		switch (insn->opcode) {
			set_up_arch_exit(bb, insn);
			set_up_arch_switch(bb, insn);
	} END_FOR_EACH_PTR(bb);

static void output(struct entrypoint *ep)
	unsigned long generation = ++bb_generation;

	/* Get rid of SSA form (phinodes etc) */

	/* Set up initial inter-bb storage links */

	/* Architecture-specific storage rules.. */
	arch_set_up_storage(ep);

	/* Show the results ... */
	output_bb(ep->entry->bb, generation);

	/* Clear the storage hashes for the next function.. */

static int compile(struct symbol_list *list)
	FOR_EACH_PTR(list, sym) {
		struct entrypoint *ep;

		ep = linearize_symbol(sym);
	} END_FOR_EACH_PTR(sym);

int main(int argc, char **argv)
	struct string_list *filelist = NULL;

	compile(sparse_initialize(argc, argv, &filelist));
	FOR_EACH_PTR_NOTAG(filelist, file) {
		compile(sparse(file));
	} END_FOR_EACH_PTR_NOTAG(file);

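/*
 * Driver flow: sparse_initialize() handles the common command-line options
 * and returns an initial symbol list, each remaining file is then parsed
 * with sparse() and handed to compile(), which linearizes every symbol it
 * is given; output() above is presumably what gets called on each resulting
 * entrypoint to emit the pseudo-assembly.
 */
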