/*
 *  X86 code generator for TCC
 *
 *  Copyright (c) 2001, 2002 Fabrice Bellard
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
/* number of available registers */
#define NB_REGS             4
/* a register can belong to several classes. The classes must be
   sorted from more general to more precise (see gv2() code which
   makes assumptions on it). */
#define RC_INT     0x0001 /* generic integer register */
#define RC_FLOAT   0x0002 /* generic float register */
#define RC_EAX     0x0004
#define RC_ST0     0x0008
#define RC_ECX     0x0010
#define RC_EDX     0x0020
#define RC_IRET    RC_EAX /* function return: integer register */
#define RC_LRET    RC_EDX /* function return: second integer register */
#define RC_FRET    RC_ST0 /* function return: float register */
/* pretty names for the registers */
enum {
    REG_EAX = 0,
    REG_ECX,
    REG_EDX,
    REG_ST0,
};

int reg_classes[NB_REGS] = {
    /* eax */ RC_INT | RC_EAX,
    /* ecx */ RC_INT | RC_ECX,
    /* edx */ RC_INT | RC_EDX,
    /* st0 */ RC_FLOAT | RC_ST0,
};
/* return registers for function */
#define REG_IRET REG_EAX /* single word int return register */
#define REG_LRET REG_EDX /* second word return register (for long long) */
#define REG_FRET REG_ST0 /* float return register */

/* defined if function parameters must be evaluated in reverse order */
#define INVERT_FUNC_PARAMS

/* defined if structures are passed as pointers. Otherwise structures
   are directly pushed on stack. */
//#define FUNC_STRUCT_PARAM_AS_PTR

/* pointer size, in bytes */
#define PTR_SIZE 4

/* long double size and alignment, in bytes */
#define LDOUBLE_SIZE  12
#define LDOUBLE_ALIGN 4

/* relocation type for 32 bit data relocation */
#define R_DATA_32   R_386_32
/* function call context */
typedef struct GFuncContext {
    int args_size;
    int func_call; /* func call type (FUNC_STDCALL or FUNC_CDECL) */
} GFuncContext;

/******************************************************/

static int *func_sub_sp_ptr;
static unsigned char *func_bound_ptr;
static int func_ret_sub;
/* output a symbol and patch all calls to it */
void gsym_addr(int t, int a)
{
    int n;

    while (t) {
        n = *(int *)t; /* next value */
        *(int *)t = a - t - 4;
        t = n;
    }
}
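
/* Note: forward references are kept as a singly linked list threaded
   through the 4-byte displacement fields themselves: each pending
   jump or call stores the address of the previous pending site in its
   displacement slot (see psym()/oad() below), and gsym_addr() walks
   that chain, rewriting every slot to the pc-relative value
   'a - t - 4' once the target address 'a' is known. */
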
/* psym is used to put an instruction with a data field which is a
   reference to a symbol. It is in fact the same as oad ! */
#define psym oad

/* instruction + 4 bytes data. Return the address of the data */
int oad(int c, int s)
{
    o(c);
    *(int *)ind = s;
    s = ind;
    ind = ind + 4;
    return s;
}
/* output constant with relocation if 'r & VT_SYM' is true */
void gen_addr32(int r, int c)
{
    /* ... */
    greloc(cur_text_section, (Sym *)c,
           ind - (int)cur_text_section->data, R_386_32);
    /* ... */
}
/* generate a modrm reference. 'op_reg' contains the additional 3
   opcode bits */
void gen_modrm(int op_reg, int r, int c)
{
    op_reg = op_reg << 3;
    if ((r & VT_VALMASK) == VT_CONST) {
        /* constant memory reference */
        o(0x05 | op_reg);
        gen_addr32(r, c);
    } else if ((r & VT_VALMASK) == VT_LOCAL) {
        /* currently, we use only ebp as base */
        if (c == (char)c) {
            /* short reference */
            o(0x45 | op_reg);
            g(c);
        } else {
            oad(0x85 | op_reg, c);
        }
    } else {
        g(0x00 | op_reg | (r & VT_VALMASK));
    }
}
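
/* Note on the encodings above: a ModR/M byte is mod(2) | reg(3) | rm(3),
   and 'op_reg << 3' places the register number (or opcode extension) in
   the reg field. 0x05 selects mod=00, rm=101 (absolute disp32),
   0x45 selects mod=01, rm=101 (disp8 off %ebp), 0x85 selects mod=10,
   rm=101 (disp32 off %ebp), and the plain 'r & VT_VALMASK' case selects
   mod=00 register-indirect addressing through register r. */
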
/* load 'r' from value 'sv' */
void load(int r, SValue *sv)
{
    int v, t, ft, fc, fr;
    SValue v1;

    fr = sv->r;
    ft = sv->t;
    fc = sv->c.ul;

    v = fr & VT_VALMASK;
    if (fr & VT_LVAL) {
        if (v == VT_LLOCAL) {
            v1.t = VT_INT;
            v1.r = VT_LOCAL | VT_LVAL;
            v1.c.ul = fc;
            load(r, &v1);
            fr = r;
        }
        if ((ft & VT_BTYPE) == VT_FLOAT) {
            o(0xd9); /* flds */
            r = 0;
        } else if ((ft & VT_BTYPE) == VT_DOUBLE) {
            o(0xdd); /* fldl */
            r = 0;
        } else if ((ft & VT_BTYPE) == VT_LDOUBLE) {
            o(0xdb); /* fldt */
            r = 5;
        } else if ((ft & VT_TYPE) == VT_BYTE) {
            o(0xbe0f);   /* movsbl */
        } else if ((ft & VT_TYPE) == (VT_BYTE | VT_UNSIGNED)) {
            o(0xb60f);   /* movzbl */
        } else if ((ft & VT_TYPE) == VT_SHORT) {
            o(0xbf0f);   /* movswl */
        } else if ((ft & VT_TYPE) == (VT_SHORT | VT_UNSIGNED)) {
            o(0xb70f);   /* movzwl */
        } else {
            o(0x8b);     /* movl */
        }
        gen_modrm(r, fr, fc);
    } else {
        if (v == VT_CONST) {
            o(0xb8 + r); /* mov $xx, r */
            gen_addr32(fr, fc);
        } else if (v == VT_LOCAL) {
            o(0x8d); /* lea xxx(%ebp), r */
            gen_modrm(r, VT_LOCAL, fc);
        } else if (v == VT_CMP) {
            oad(0xb8 + r, 0); /* mov $0, r */
            o(0x0f); /* setxx %br */
            o(fc);
            o(0xc0 + r);
        } else if (v == VT_JMP || v == VT_JMPI) {
            t = v & 1;
            oad(0xb8 + r, t); /* mov $1, r */
            oad(0xe9, 5); /* jmp after */
            gsym(fc);
            oad(0xb8 + r, t ^ 1); /* mov $0, r */
        } else if (v != r) {
            o(0x89);
            o(0xc0 + r + v * 8); /* mov v, r */
        }
    }
}
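
/* Note: the VT_CMP case materializes a comparison result by clearing
   the destination with 'mov $0, r' and emitting a setcc whose condition
   code comes from fc (set by gen_opi() below); the VT_JMP/VT_JMPI case
   turns a pending jump list into a 0/1 value by emitting both constant
   loads and resolving the jump list between them. */
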
/* store register 'r' in lvalue 'v' */
void store(int r, SValue *v)
{
    int fr, bt, ft, fc;

    ft = v->t;
    fc = v->c.ul;
    fr = v->r & VT_VALMASK;
    bt = ft & VT_BTYPE;
    /* XXX: incorrect if float reg to reg */
    if (bt == VT_FLOAT) {
        o(0xd9); /* fsts */
        r = 2;
    } else if (bt == VT_DOUBLE) {
        o(0xdd); /* fstl */
        r = 2;
    } else if (bt == VT_LDOUBLE) {
        o(0xc0d9); /* fld %st(0) */
        o(0xdb); /* fstpt */
        r = 7;
    } else {
        if (bt == VT_SHORT)
            o(0x66);
        if (bt == VT_BYTE)
            o(0x88);
        else
            o(0x89);
    }
    if (fr == VT_CONST ||
        fr == VT_LOCAL ||
        (v->r & VT_LVAL)) {
        gen_modrm(r, v->r, fc);
    } else if (fr != r) {
        o(0xc0 + fr + r * 8); /* mov r, fr */
    }
}
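
/* Note: store() emits the opcode byte(s) first and then a single memory
   or register ModR/M reference; in the x87 branches the value passed as
   'r' to gen_modrm() is not a CPU register number but the /r opcode
   extension selecting the fst/fstp variant. */
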
/* start function call and return function call context */
void gfunc_start(GFuncContext *c, int func_call)
{
    c->args_size = 0;
    c->func_call = func_call;
}
/* push function parameter which is in (vtop->t, vtop->c). Stack entry
   is then popped. */
void gfunc_param(GFuncContext *c)
{
    int size, align, r;

    if ((vtop->t & VT_BTYPE) == VT_STRUCT) {
        size = type_size(vtop->t, &align);
        /* align to stack align size */
        size = (size + 3) & ~3;
        /* allocate the necessary size on stack */
        oad(0xec81, size); /* sub $xxx, %esp */
        /* generate structure store */
        r = get_reg(RC_INT);
        o(0x89); /* mov %esp, r */
        o(0xe0 + r);
        vset(vtop->t, r | VT_LVAL, 0);
        vswap();
        vstore();
        c->args_size += size;
    } else if (is_float(vtop->t)) {
        gv(RC_FLOAT); /* only one float register */
        if ((vtop->t & VT_BTYPE) == VT_FLOAT)
            size = 4;
        else if ((vtop->t & VT_BTYPE) == VT_DOUBLE)
            size = 8;
        else
            size = 12;
        oad(0xec81, size); /* sub $xxx, %esp */
        if (size == 12)
            o(0x7cdb); /* fstpt 0(%esp) */
        else
            o(0x5cd9 + size - 4); /* fstp[s|l] 0(%esp) */
        g(0x24);
        g(0x00);
        c->args_size += size;
    } else {
        /* simple type (currently always same size) */
        /* XXX: implicit cast ? */
        r = gv(RC_INT);
        if ((vtop->t & VT_BTYPE) == VT_LLONG) {
            size = 8;
            o(0x50 + vtop->r2); /* push r */
        } else {
            size = 4;
        }
        o(0x50 + r); /* push r */
        c->args_size += size;
    }
    vtop--;
}
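
/* Note: because INVERT_FUNC_PARAMS is defined above, the compiler core
   evaluates arguments right to left, so each gfunc_param() push lands at
   the correct i386 cdecl stack position; 'args_size' accumulates the
   number of pushed bytes so that gfunc_call() can pop them again for
   FUNC_CDECL, while FUNC_STDCALL leaves the cleanup to the callee (see
   func_ret_sub in the prolog/epilog). */
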
static void gadd_sp(int val)
{
    if (val == (char)val) {
        o(0xc483);
        g(val);
    } else {
        oad(0xc481, val); /* add $xxx, %esp */
    }
}
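
/* Note: the (val == (char)val) test picks the 3-byte 'add $imm8, %esp'
   form (sign-extended 8-bit immediate) when the adjustment fits in a
   signed byte, and falls back to the 6-byte 'add $imm32, %esp' form
   otherwise. */
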
/* generate function call with address in (vtop->t, vtop->c) and free function
   context. Stack entry is popped */
void gfunc_call(GFuncContext *c)
{
    int r;

    if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
        /* constant case */
        if (vtop->r & VT_SYM) {
            /* relocation case */
            greloc(cur_text_section, vtop->c.sym,
                   ind + 1 - (int)cur_text_section->data, R_386_PC32);
        }
        oad(0xe8, vtop->c.ul - ind - 5);
    } else {
        /* otherwise, indirect call */
        r = gv(RC_INT);
        o(0xff); /* call *r */
        o(0xd0 + r);
    }
    if (c->args_size && c->func_call == FUNC_CDECL)
        gadd_sp(c->args_size);
    vtop--;
}
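
/* Note: a direct call is emitted as 0xe8 followed by a rel32 that is
   relative to the end of the 5-byte instruction, hence the
   'vtop->c.ul - ind - 5' displacement; for symbolic targets the
   R_386_PC32 relocation is registered at 'ind + 1', i.e. at the
   displacement field just after the opcode byte. */
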
/* generate function prolog of type 't' */
void gfunc_prolog(int t)
{
    int addr, align, size, u, func_call;
    Sym *sym;

    sym = sym_find((unsigned)t >> VT_STRUCT_SHIFT);
    func_call = sym->r;
    addr = 8;
    /* if the function returns a structure, then add an
       implicit pointer parameter */
    func_vt = sym->t;
    if ((func_vt & VT_BTYPE) == VT_STRUCT) {
        func_vc = addr;
        addr += 4;
    }
    /* define parameters */
    while ((sym = sym->next) != NULL) {
        u = sym->t;
        sym_push(sym->v & ~SYM_FIELD, u,
                 VT_LOCAL | VT_LVAL, addr);
        size = type_size(u, &align);
        size = (size + 3) & ~3;
#ifdef FUNC_STRUCT_PARAM_AS_PTR
        /* structs are passed as pointer */
        if ((u & VT_BTYPE) == VT_STRUCT) {
            size = 4;
        }
#endif
        addr += size;
    }
    func_ret_sub = 0;
    /* pascal type call ? */
    if (func_call == FUNC_STDCALL)
        func_ret_sub = addr - 8;
    o(0xe58955); /* push %ebp, mov %esp, %ebp */
    func_sub_sp_ptr = (int *)oad(0xec81, 0); /* sub $xxx, %esp */
    /* leave some room for bound checking code */
    if (do_bounds_check) {
        oad(0xb8, 0); /* lbound section pointer */
        oad(0xb8, 0); /* call to function */
        func_bound_ptr = lbounds_section->data_ptr;
    }
}
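
/* Note: after 'push %ebp; mov %esp, %ebp' the saved %ebp and return
   address occupy 8 bytes, so the first parameter lives at 8(%ebp).
   The 'sub $xxx, %esp' is emitted with a zero immediate and backpatched
   in gfunc_epilog() through func_sub_sp_ptr once the final size of the
   locals is known, and for stdcall functions func_ret_sub records the
   number of argument bytes that the final 'ret $n' must pop. */
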
/* generate function epilog */
void gfunc_epilog(void)
{
#ifdef CONFIG_TCC_BCHECK
    if (do_bounds_check && func_bound_ptr != lbounds_section->data_ptr) {
        int saved_ind;
        int *bounds_ptr;
        Sym *sym, *sym_data;

        /* add end of table info */
        bounds_ptr = (int *)lbounds_section->data_ptr;
        *bounds_ptr++ = 0;
        lbounds_section->data_ptr = (unsigned char *)bounds_ptr;
        /* generate bound local allocation */
        saved_ind = ind;
        ind = (int)func_sub_sp_ptr + 4;
        sym_data = get_sym_ref(char_pointer_type, lbounds_section,
                               func_bound_ptr - lbounds_section->data,
                               lbounds_section->data_ptr - func_bound_ptr);
        greloc(cur_text_section, sym_data,
               ind + 1 - (int)cur_text_section->data, R_386_32);
        oad(0xb8, 0); /* mov $xxx, %eax */
        sym = external_sym(TOK___bound_local_new, func_old_type, 0);
        greloc(cur_text_section, sym,
               ind + 1 - (int)cur_text_section->data, R_386_PC32);
        /* ... emit the call and restore the output pointer ... */
        ind = saved_ind;
        /* generate bound check local freeing */
        o(0x5250); /* save returned value, if any */
        greloc(cur_text_section, sym_data,
               ind + 1 - (int)cur_text_section->data, R_386_32);
        oad(0xb8, 0); /* mov $xxx, %eax */
        sym = external_sym(TOK___bound_local_delete, func_old_type, 0);
        greloc(cur_text_section, sym,
               ind + 1 - (int)cur_text_section->data, R_386_PC32);
        /* ... emit the call ... */
        o(0x585a); /* restore returned value, if any */
    }
#endif
    o(0xc9); /* leave */
    if (func_ret_sub == 0) {
        o(0xc3); /* ret */
    } else {
        o(0xc2); /* ret n */
        g(func_ret_sub);
        g(func_ret_sub >> 8);
    }
    /* align local size to word & save local variables */
    *func_sub_sp_ptr = (-loc + 3) & -4;
}
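
/* Note: the bound-checking setup is generated after the fact by
   temporarily pointing 'ind' into the room reserved by the two
   oad(0xb8, 0) placeholders in gfunc_prolog() (just after the frame
   'sub' at func_sub_sp_ptr), while the freeing call is emitted in the
   epilog itself. The final '*func_sub_sp_ptr = (-loc + 3) & -4'
   backpatches the frame size, rounded up to a multiple of 4 bytes. */
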
/* generate a jump to a label */
int gjmp(int t)
{
    return psym(0xe9, t);
}

/* generate a jump to a fixed address */
void gjmp_addr(int a)
{
    oad(0xe9, a - ind - 5);
}
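
/* Note: 0xe9 is 'jmp rel32'; the displacement is relative to the end of
   the 5-byte instruction, hence 'a - ind - 5'. */
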
/* generate a test. set 'inv' to invert test. Stack entry is popped */
int gtst(int inv, int t)
{
    int v, *p;

    v = vtop->r & VT_VALMASK;
    if (v == VT_CMP) {
        /* fast case : can jump directly since flags are set */
        g(0x0f);
        t = psym((vtop->c.i - 16) ^ inv, t);
    } else if (v == VT_JMP || v == VT_JMPI) {
        /* && or || optimization */
        if ((v & 1) == inv) {
            /* insert vtop->c jump list in t */
            p = &vtop->c.i;
            while (*p != 0)
                p = (int *)*p;
            *p = t;
            t = vtop->c.i;
        } else {
            t = gjmp(t);
            gsym(vtop->c.i);
        }
    } else {
        if (is_float(vtop->t)) {
            vpushi(0);
            gen_op(TOK_NE);
        }
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            /* constant jmp optimization */
            if ((vtop->c.i != 0) != inv)
                t = gjmp(t);
        } else {
            v = gv(RC_INT);
            o(0x85);
            o(0xc0 + v * 9);
            g(0x0f);
            t = psym(0x85 ^ inv, t);
        }
    }
    vtop--;
    return t;
}
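
/* Note: for VT_CMP, vtop->c.i holds the comparison token, which is
   chosen to match the i386 setcc opcode byte (0x90 + cc); subtracting
   16 gives the corresponding 'jcc rel32' opcode (0x80 + cc) following
   the 0x0f prefix, and xoring with 'inv' flips the lowest condition
   bit to invert the test. The generic case uses 0x85 ^ inv, i.e.
   jne/je after testing the value against itself. */
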
/* generate an integer binary operation */
void gen_opi(int op)
{
    int r, fr, opc, c;

    switch(op) {
    case '+':
    case TOK_ADDC1: /* add with carry generation */
        /* ... */
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            /* constant case */
            /* ... */
            /* XXX: generate inc and dec for smaller code ? */
            o(0xc0 | (opc << 3) | r);      /* 8 bit immediate form */
            /* ... */
            oad(0xc0 | (opc << 3) | r, c); /* 32 bit immediate form */
        } else {
            /* ... */
            o((opc << 3) | 0x01);
            o(0xc0 + r + fr * 8);
        }
        /* ... */
        if (op >= TOK_ULT && op <= TOK_GT) {
            /* ... */
            vset(VT_INT, VT_CMP, op);
        }
        break;
    case '-':
    case TOK_SUBC1: /* sub with carry generation */
        /* ... */
    case TOK_ADDC2: /* add with carry use */
        /* ... */
    case TOK_SUBC2: /* sub with carry use */
        /* ... */
    case '*':
        /* ... */
        o(0xaf0f); /* imul fr, r */
        o(0xc0 + fr + r * 8);
        break;
    case TOK_SHL:
    case TOK_SHR:
    case TOK_SAR:
        /* ... */
        opc = 0xc0 | (opc << 3);
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            /* ... */
            c = vtop->c.i & 0x1f;
            o(0xc1); /* shl/shr/sar $xxx, r */
            /* ... */
        } else {
            /* we generate the shift in ecx */
            /* ... */
            o(0xd3); /* shl/shr/sar %cl, r */
            /* ... */
        }
        break;
    case '/':
    case '%':
    case TOK_UDIV:
    case TOK_UMOD:
    case TOK_UMULL:
        /* first operand must be in eax */
        /* XXX: need better constraint for second operand */
        /* ... */
        if (op == TOK_UMULL) {
            o(0xf7); /* mul fr */
            /* ... */
        } else {
            if (op == TOK_UDIV || op == TOK_UMOD) {
                o(0xf7d231); /* xor %edx, %edx, div fr, %eax */
            } else {
                o(0xf799); /* cltd, idiv fr, %eax */
            }
            /* ... */
            if (op == '%' || op == TOK_UMOD)
                r = REG_EDX;
            /* ... */
        }
        break;
    }
}
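
/* Note: for the ordinary ALU operations 'opc' is the standard i386
   opcode extension (add=0, or=1, adc=2, sbb=3, and=4, sub=5, xor=6,
   cmp=7): 0x83 /opc takes a sign-extended 8-bit immediate, 0x81 /opc a
   32-bit immediate, and '(opc << 3) | 0x01' is the register-register
   form of the same operation. Comparisons go through the same path and
   then replace the two operands by a VT_CMP value carrying the operator
   token for gtst()/load(). */
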
/* generate a floating point operation 'v = t1 op t2' instruction. The
   two operands are guaranteed to have the same floating point type */
/* XXX: need to use ST1 too */
void gen_opf(int op)
{
    int a, ft, fc, swapped, r;

    /* convert constants to memory references */
    if ((vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
        vswap();
        gv(RC_FLOAT);
        vswap();
    }
    if ((vtop[0].r & (VT_VALMASK | VT_LVAL)) == VT_CONST)
        gv(RC_FLOAT);

    /* must put at least one value in the floating point register */
    if ((vtop[-1].r & VT_LVAL) &&
        (vtop[0].r & VT_LVAL)) {
        vswap();
        gv(RC_FLOAT);
        vswap();
    }
    swapped = 0;
    /* swap the stack if needed so that t1 is the register and t2 is
       the memory reference */
    if (vtop[-1].r & VT_LVAL) {
        vswap();
        swapped = 1;
    }
    if (op >= TOK_ULT && op <= TOK_GT) {
        /* load on stack second operand */
        load(REG_ST0, vtop);
        save_reg(REG_EAX); /* eax is used by FP comparison code */
        if (op == TOK_GE || op == TOK_GT)
            swapped = !swapped;
        else if (op == TOK_EQ || op == TOK_NE)
            swapped = 0;
        if (swapped)
            o(0xc9d9); /* fxch %st(1) */
        o(0xe9da); /* fucompp */
        o(0xe0df); /* fnstsw %ax */
        if (op == TOK_EQ) {
            o(0x45e480); /* and $0x45, %ah */
            o(0x40fC80); /* cmp $0x40, %ah */
        } else if (op == TOK_NE) {
            o(0x45e480); /* and $0x45, %ah */
            o(0x40f480); /* xor $0x40, %ah */
        } else if (op == TOK_GE || op == TOK_LE) {
            o(0x05c4f6); /* test $0x05, %ah */
            op = TOK_EQ;
        } else {
            o(0x45c4f6); /* test $0x45, %ah */
            op = TOK_EQ;
        }
        vtop--;
        vtop->r = VT_CMP;
        vtop->c.i = op;
    } else {
        /* no memory reference possible for long double operations */
        if ((vtop->t & VT_BTYPE) == VT_LDOUBLE) {
            load(REG_ST0, vtop);
            swapped = !swapped;
        }
        /* ... select the x87 opcode extension 'a' for op, taking
           'swapped' into account ... */
        ft = vtop->t;
        fc = vtop->c.ul;
        if ((ft & VT_BTYPE) == VT_LDOUBLE) {
            o(0xde); /* fxxxp %st, %st(1) */
            o(0xc1 + (a << 3));
        } else {
            /* if saved lvalue, then we must reload it */
            r = vtop->r;
            if ((r & VT_VALMASK) == VT_LLOCAL) {
                SValue v1;
                r = get_reg(RC_INT);
                v1.t = VT_INT;
                v1.r = VT_LOCAL | VT_LVAL;
                v1.c.ul = fc;
                load(r, &v1);
                fc = 0;
            }
            if ((ft & VT_BTYPE) == VT_DOUBLE)
                o(0xdc);
            else
                o(0xd8);
            gen_modrm(a, r, fc);
        }
        vtop--;
    }
}
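
/* Note: 'fucompp; fnstsw %ax' moves the x87 condition flags into %ah,
   where C0, C2 and C3 appear as bits 0x01, 0x04 and 0x40. Equality is
   therefore 'and $0x45; cmp $0x40' (C3 set, C0/C2 clear), while the
   ordered below/above tests only need a 'test' with mask 0x45 or 0x05
   before the integer setcc/jcc machinery takes over via VT_CMP. */
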
/* convert integers to fp 't' type. Must handle 'int', 'unsigned int'
   and 'long long' cases. */
void gen_cvt_itof(int t)
{
    save_reg(REG_ST0);
    gv(RC_INT);
    if ((vtop->t & VT_BTYPE) == VT_LLONG) {
        /* signed long long to float/double/long double (unsigned case
           is handled generically) */
        o(0x50 + vtop->r2); /* push r2 */
        o(0x50 + (vtop->r & VT_VALMASK)); /* push r */
        o(0x242cdf); /* fildll (%esp) */
        o(0x08c483); /* add $8, %esp */
    } else if ((vtop->t & (VT_BTYPE | VT_UNSIGNED)) ==
               (VT_INT | VT_UNSIGNED)) {
        /* unsigned int to float/double/long double */
        o(0x6a); /* push $0 */
        g(0x00);
        o(0x50 + (vtop->r & VT_VALMASK)); /* push r */
        o(0x242cdf); /* fildll (%esp) */
        o(0x08c483); /* add $8, %esp */
    } else {
        /* int to float/double/long double */
        o(0x50 + (vtop->r & VT_VALMASK)); /* push r */
        o(0x2404db); /* fildl (%esp) */
        o(0x04c483); /* add $4, %esp */
    }
    vtop->r = REG_ST0;
}
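
/* Note: the x87 fild instruction only understands signed integers, so a
   32-bit unsigned value is converted by pushing a zero high word and
   loading the resulting 64-bit signed value with fildll; plain int uses
   the 32-bit fildl form directly. */
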
/* convert fp to int 't' type */
/* XXX: handle long long case */
void gen_cvt_ftoi(int t)
{
    int r, r2, size;
    Sym *sym;

    gv(RC_FLOAT);
    if (t != VT_INT)
        size = 8;
    else
        size = 4;

    o(0x2dd9); /* fldcw xxx */
    sym = external_sym(TOK___tcc_int_fpu_control,
                       VT_SHORT | VT_UNSIGNED, VT_LVAL);
    greloc(cur_text_section, sym,
           ind - (int)cur_text_section->data, R_386_32);
    gen_le32(0);

    oad(0xec81, size); /* sub $xxx, %esp */
    if (size == 4)
        o(0x1cdb); /* fistpl */
    else
        o(0x3cdf); /* fistpll */
    o(0x24);
    o(0x2dd9); /* fldcw xxx */
    sym = external_sym(TOK___tcc_fpu_control,
                       VT_SHORT | VT_UNSIGNED, VT_LVAL);
    greloc(cur_text_section, sym,
           ind - (int)cur_text_section->data, R_386_32);
    gen_le32(0);

    r = get_reg(RC_INT);
    o(0x58 + r); /* pop r */
    if (size == 8) {
        if (t == VT_LLONG) {
            vtop->r = r; /* mark reg as used */
            r2 = get_reg(RC_INT);
            o(0x58 + r2); /* pop r2 */
            vtop->r2 = r2;
        } else {
            o(0x04c483); /* add $4, %esp */
        }
    }
    vtop->r = r;
}
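
/* Note: fistp rounds according to the current FPU control word, while C
   requires truncation toward zero; the two fldcw instructions therefore
   load __tcc_int_fpu_control (presumably a control word with the
   rounding field set to truncation) around the store and then restore
   __tcc_fpu_control afterwards. */
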
/* convert from one floating point type to another */
void gen_cvt_ftof(int t)
{
    /* all we have to do on i386 is to put the float in a register */
    gv(RC_FLOAT);
}
/* bound check support functions */
#ifdef CONFIG_TCC_BCHECK

/* generate a bounded pointer addition */
void gen_bounded_ptr_add(void)
{
    Sym *sym;

    /* prepare fast i386 function call (args in eax and edx) */
    gv2(RC_EAX, RC_EDX);
    /* save all temporary registers */
    /* ... */
    /* do a fast function call */
    sym = external_sym(TOK___bound_ptr_add, func_old_type, 0);
    greloc(cur_text_section, sym,
           ind + 1 - (int)cur_text_section->data, R_386_PC32);
    /* ... */
    /* returned pointer is in eax */
    vtop->r = REG_EAX | VT_BOUNDED;
    /* address of bounding function call point */
    vtop->c.ptr = (cur_text_section->reloc->data_ptr - sizeof(Elf32_Rel));
}
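
/* Note: the address of the Elf32_Rel entry created for this call is
   remembered in vtop->c.ptr so that gen_bounded_ptr_deref() below can
   retarget the very same call site to the size-specific
   __bound_ptr_indirN checking function. */
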
/* patch pointer addition in vtop so that pointer dereferencing is
   also tested */
void gen_bounded_ptr_deref(void)
{
    int func, size, align;
    Elf32_Rel *rel;
    Sym *sym;

    /* XXX: put that code in generic part of tcc */
    if (!is_float(vtop->t)) {
        if (vtop->r & VT_LVAL_BYTE)
            size = 1;
        else if (vtop->r & VT_LVAL_SHORT)
            size = 2;
        else
            size = 4;
    } else {
        size = type_size(vtop->t, &align);
    }
    switch(size) {
    case 1: func = TOK___bound_ptr_indir1; break;
    case 2: func = TOK___bound_ptr_indir2; break;
    case 4: func = TOK___bound_ptr_indir4; break;
    case 8: func = TOK___bound_ptr_indir8; break;
    case 12: func = TOK___bound_ptr_indir12; break;
    case 16: func = TOK___bound_ptr_indir16; break;
    default:
        error("unhandled size when dereferencing bounded pointer");
        break;
    }

    /* patch relocation */
    /* XXX: find a better solution ? */
    rel = (Elf32_Rel *)vtop->c.ptr;
    sym = external_sym(func, func_old_type, 0);
    if (!sym->c)
        put_extern_sym(sym, NULL, 0);
    rel->r_info = ELF32_R_INFO(sym->c, ELF32_R_TYPE(rel->r_info));
}
#endif
/* end of X86 code generator */
/*************************************************************/