1 /* architecture-dependent code generation for x86 */
5 /* x86-64 registers, without r8-r15 */
/* NOTE(review): the leading numerals on these lines are extraction residue
 * (original file line numbers fused into the text); the code starts at '#'. */
/* MIN: smaller of a and b — both arguments may be evaluated twice */
36 #define MIN(a, b) ((a) < (b) ? (a) : (b))
/* ALIGN: round x up to the next multiple of a; a must be a power of two */
37 #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))
/* allocation order of temporary registers by register number;
 * presumably rax, rcx, rdx, rsi, rdi, rbx — TODO confirm against the
 * R_* register definitions, which are not visible in this extract */
39 int tmpregs
[] = {0, 1, 2, 6, 7, 3};
/* OP2: pack a two-byte opcode into one int; bit 16 (0x010000) flags it as
 * a two-byte opcode so emitters can tell it apart from a one-byte opcode */
42 #define OP2(o2, o1) (0x010000 | ((o2) << 8) | (o1))
/* O2/O1: extract the first and second opcode byte packed by OP2 */
43 #define O2(op) (((op) >> 8) & 0xff)
44 #define O1(op) ((op) & 0xff)
/* MODRM: build an x86 ModRM byte: mod (2 bits), reg (3 bits), r/m (3 bits) */
45 #define MODRM(m, r1, r2) ((m) << 6 | (r1) << 3 | (r2))
/* the code segment buffer: all generated machine code is appended here
 * via mem_put() and patched in place via mem_cpy() */
47 static struct mem cs
; /* generated code */
49 /* code generation functions */
50 static void os(void *s
, int n
)
55 static char *ointbuf(long n
, int l
)
59 for (i
= 0; i
< l
; i
++) {
66 static void oi(long n
, int l
)
68 mem_put(&cs
, ointbuf(n
, l
), l
);
71 static void oi_at(long pos
, long n
, int l
)
73 mem_cpy(&cs
, pos
, ointbuf(n
, l
), l
);
76 static long opos(void)
/* op_x(): emit the opcode byte(s) of an instruction.
 * NOTE(review): the interior of this function (original lines 82-87,
 * presumably the declaration of `sz` and any prefix emission) is missing
 * from this extract — annotating only, not rewriting. */
81 static void op_x(int op
, int r1
, int r2
, int bt
)
/* byte-sized operations clear bit 0 of the opcode (the x86 w-bit) */
88 oi(sz
== 1 ? O1(op
) & ~0x1 : O1(op
), 1);
93 /* op_*(): r=reg, m=mem, i=imm, s=sym */
/* op_rm(): emit an instruction with one register and one memory operand
 * (base register + displacement). NOTE(review): original lines 95, 99 and
 * 103-107 are missing from this extract — presumably the opening brace,
 * the `mod = 0` arm of the !off test, and the SIB/displacement emission.
 * Annotating only; do not trust this fragment to be complete. */
94 static void op_rm(int op
, int src
, int base
, int off
, int bt
)
/* 1-byte displacement if off fits in a signed char, else 4 bytes */
96 int dis
= off
== (char) off
? 1 : 4;
/* ModRM mod field: 2 = disp32, 1 = disp8 */
97 int mod
= dis
== 4 ? 2 : 1;
/* rbp as base cannot use mod 0 (that encoding means rip/abs-relative) */
98 if (!off
&& (base
& 7) != R_RBP
)
100 op_x(op
, src
, base
, bt
);
101 oi(MODRM(mod
, src
& 0x07, base
& 0x07), 1);
/* rsp as base requires a SIB byte — emission line missing from extract */
102 if ((base
& 7) == R_RSP
)
/*
 * op_rr(): emit an instruction with two register operands:
 * opcode byte(s) followed by a ModRM byte with mod = 3 (register direct).
 */
static void op_rr(int op, int src, int dst, int bt)
{
	op_x(op, src, dst, bt);
	oi(MODRM(3, src & 0x07, dst & 0x07), 1);
}
/* operand size used for register moves: always a full long on this target */
114 #define movrx_bt(bt) (LONGSZ)
/* movrx_op(): pick the mov opcode for type bt: 0f bf/b7 are movsx/movzx
 * from a 16-bit source, 0f be/b6 the 8-bit variants; T_MSIGN selects the
 * sign-extending form. NOTE(review): the size-dispatch lines (original
 * 117-119, 121, 123-125 — presumably T_SZ(bt) checks and the plain-mov
 * fallback return) are missing from this extract. */
116 static int movrx_op(int bt
, int mov
)
120 return OP2(0x0f, bt
& T_MSIGN
? 0xbf : 0xb7);
122 return OP2(0x0f, bt
& T_MSIGN
? 0xbe : 0xb6);
126 static void mov_r2r(int rd
, int r1
, unsigned bt
)
128 if (rd
!= r1
|| T_SZ(bt
) != LONGSZ
)
129 op_rr(movrx_op(bt
, I_MOVR
), rd
, r1
, movrx_bt(bt
));
132 static void i_mov(int rd
, int rn
)
134 op_rr(movrx_op(LONGSZ
, I_MOVR
), rd
, rn
, movrx_bt(LONGSZ
));
137 static void i_add(int op
, int rd
, int r1
, int r2
)
139 /* opcode for O_ADD, O_SUB, O_AND, O_OR, O_XOR */
140 static int rx
[] = {0003, 0053, 0043, 0013, 0063};
141 op_rr(rx
[op
& 0x0f], rd
, r2
, LONGSZ
);
144 static void i_add_imm(int op
, int rd
, int rn
, long n
)
146 /* opcode for O_ADD, O_SUB, O_AND, O_OR, O_XOR */
147 static int rx
[] = {0xc0, 0xe8, 0xe0, 0xc8, 0xf0};
148 unsigned char s
[4] = {0x83, rx
[op
& 0x0f] | rd
, n
& 0xff};
152 static void i_num(int rd
, long n
)
155 op_rr(I_XOR
, rd
, rd
, 4);
158 op_x(I_MOVIR
+ (rd
& 7), 0, rd
, LONGSZ
);
163 static void i_mul(int rd
, int r1
, int r2
)
167 op_rr(I_MUL
, 4, r2
, LONGSZ
);
170 static void i_div(int op
, int rd
, int r1
, int r2
)
175 op_x(I_CQO
, R_RAX
, R_RDX
, LONGSZ
);
179 op_rr(I_MUL
, bt
& T_MSIGN
? 7 : 6, r2
, LONGSZ
);
182 static void i_tst(int rn
, int rm
)
184 op_rr(I_TST
, rn
, rm
, LONGSZ
);
187 static void i_cmp(int rn
, int rm
)
189 op_rr(I_CMP
, rn
, rm
, LONGSZ
);
/* i_cmp_imm(): compare register rn against a small immediate:
 * 0x83 /7 ib encodes `cmp r/m32, imm8` (0xf8 | rn is the ModRM byte).
 * NOTE(review): the emission line (original 195, presumably os(s, 3))
 * is missing from this extract. */
192 static void i_cmp_imm(int rn
, long n
)
194 unsigned char s
[4] = {0x83, 0xf8 | rn
, n
& 0xff};
198 static void i_shl(int op
, int rd
, int r1
, int rs
)
202 if ((op
& 0x0f) == 1)
203 sm
= bt
& T_MSIGN
? 7 : 5;
204 op_rr(I_SHX
, sm
, rd
, LONGSZ
);
207 static void i_shl_imm(int op
, int rd
, int rn
, long n
)
210 int sm
= (op
& 0x1) ? (bt
& T_MSIGN
? 0xf8 : 0xe8) : 0xe0;
211 char s
[4] = {0xc1, sm
| rn
, n
& 0xff};
215 static void i_neg(int rd
)
217 op_rr(I_NOT
, 3, rd
, LONGSZ
);
220 static void i_not(int rd
)
222 op_rr(I_NOT
, 2, rd
, LONGSZ
);
/* i_cond(): map a comparison operation to its x86 condition-code byte
 * (the second byte of setcc/jcc near): unsigned uses b/ae/e/ne/be/a,
 * signed uses l/ge/e/ne/le/g. NOTE(review): `bt` is declared on a line
 * missing from this extract (original 230) — presumably derived from op. */
225 static int i_cond(long op
)
227 /* lt, ge, eq, ne, le, gt */
228 static int ucond
[] = {0x92, 0x93, 0x94, 0x95, 0x96, 0x97};
229 static int scond
[] = {0x9c, 0x9d, 0x94, 0x95, 0x9e, 0x9f};
231 return bt
& T_MSIGN
? scond
[op
& 0x0f] : ucond
[op
& 0x0f];
/* i_set(): materialize a comparison result as 0/1 in rd via setcc + movzx.
 * NOTE(review): original line 237 is missing from this extract —
 * presumably `set[1] = i_cond(op);`, which patches the 0x00 placeholder
 * in the setcc template before it is emitted. */
234 static void i_set(long op
, int rd
)
236 char set
[] = "\x0f\x00\xc0";
238 os(set
, 3); /* setl al */
239 os("\x0f\xb6\xc0", 3); /* movzx rax, al */
/* i_lnot(): logical not — compare the register against zero so a later
 * set/jump of the equal condition yields !rd. NOTE(review): original
 * lines 243, 245 and 247 are missing from this extract — presumably the
 * patching of the register field into cmp[] and a following i_set call. */
242 static void i_lnot(int rd
)
244 char cmp
[] = "\x83\xf8\x00";
246 os(cmp
, 3); /* cmp rax, 0 */
250 static void jx(int x
, int nbytes
)
254 oi(0x70 | (x
& 0x0f), 1); /* jx $addr */
257 os(op
, 2); /* jx $addr */
261 /* generate cmp or tst before a conditional jump */
262 static void i_jcmp(long op
, long rn
, long rm
)
274 /* generate a jump instruction and return the of its displacement */
275 static long i_jmp(long op
, int nb
)
278 jx(O_C(op
) == O_JZ
? 0x84 : 0x85, nb
);
280 jx(i_cond(op
) & ~0x10, nb
);
282 os(nb
== 1 ? "\xeb" : "\xe9", 1);
287 /* the length of a jump instruction opcode */
288 static int i_jlen(long op
, int nb
)
290 if (op
& (O_JZ
| O_JCC
))
296 static void i_zx(int rd
, int r1
, int bits
)
299 i_shl_imm(O_SHL
, rd
, rd
, LONGSZ
* 8 - bits
);
300 i_shl_imm(O_SHR
, rd
, rd
, LONGSZ
* 8 - bits
);
302 mov_r2r(rd
, r1
, bits
>> 3);
307 static void i_sx(int rd
, int r1
, int bits
)
309 mov_r2r(rd
, r1
, T_MSIGN
| (bits
>> 3));
312 static void i_cast(int rd
, int rn
, int bt
)
319 i_sx(rd
, rn
, T_SZ(bt
) * 8);
321 i_zx(rd
, rn
, T_SZ(bt
) * 8);
325 static void i_add_anyimm(int rd
, int rn
, long n
)
327 op_rm(I_LEA
, rd
, rn
, n
, LONGSZ
);
/* bookkeeping for relocations, labels and forward jumps; the rel_*, lab_*
 * and jmp_* arrays grow on demand (see the mextend calls elsewhere).
 * NOTE(review): lab_n, used alongside lab_sz later in the file, is declared
 * on a line missing from this extract (original 334). */
330 static long *rel_sym
; /* relocation symbols */
331 static long *rel_flg
; /* relocation flags */
332 static long *rel_off
; /* relocation offsets */
333 static long rel_n
, rel_sz
; /* relocation count */
335 static long lab_sz
; /* label count */
336 static long *lab_loc
; /* label offsets in cs */
337 static long jmp_n
, jmp_sz
; /* jump count */
338 static long *jmp_off
; /* jump offsets */
339 static long *jmp_dst
; /* jump destinations */
340 static long *jmp_op
; /* jump opcode */
341 static long jmp_ret
; /* the position of the last return jmp */
343 static void lab_add(long id
)
345 while (id
>= lab_sz
) {
347 lab_sz
= MAX(128, lab_sz
* 2);
348 lab_loc
= mextend(lab_loc
, lab_n
, lab_sz
, sizeof(*lab_loc
));
350 lab_loc
[id
] = opos();
353 static void jmp_add(long op
, long off
, long dst
)
355 if (jmp_n
== jmp_sz
) {
356 jmp_sz
= MAX(128, jmp_sz
* 2);
357 jmp_off
= mextend(jmp_off
, jmp_n
, jmp_sz
, sizeof(*jmp_off
));
358 jmp_dst
= mextend(jmp_dst
, jmp_n
, jmp_sz
, sizeof(*jmp_dst
));
359 jmp_op
= mextend(jmp_op
, jmp_n
, jmp_sz
, sizeof(*jmp_op
));
361 jmp_off
[jmp_n
] = off
;
362 jmp_dst
[jmp_n
] = dst
;
367 void i_label(long id
)
372 static void i_rel(long sym
, long flg
, long off
)
374 if (rel_n
== rel_sz
) {
375 rel_sz
= MAX(128, rel_sz
* 2);
376 rel_sym
= mextend(rel_sym
, rel_n
, rel_sz
, sizeof(*rel_sym
));
377 rel_flg
= mextend(rel_flg
, rel_n
, rel_sz
, sizeof(*rel_flg
));
378 rel_off
= mextend(rel_off
, rel_n
, rel_sz
, sizeof(*rel_off
));
380 rel_sym
[rel_n
] = sym
;
381 rel_flg
[rel_n
] = flg
;
382 rel_off
[rel_n
] = off
;
386 static void i_sym(int rd
, int sym
, int off
)
388 op_x(I_MOVIR
+ (rd
& 7), 0, rd
, LONGSZ
);
389 i_rel(sym
, OUT_CS
, opos());
393 static void i_saveregs(long sregs
, long sregs_pos
, int st
)
397 for (i
= 0; i
< N_TMPS
; i
++)
398 if ((1 << tmpregs
[i
]) & sregs
)
399 op_rm(st
? I_MOV
: I_MOVR
, tmpregs
[i
], REG_FP
,
400 sregs_pos
+ nsregs
++ * ULNG
, ULNG
);
403 void i_wrap(int argc
, long sargs
, long spsub
, int initfp
, long sregs
, long sregs_pos
)
407 long diff
; /* prologue length */
409 /* removing the last jmp to the epilogue */
410 if (jmp_ret
+ i_jlen(O_JMP
, 4) + 4 == opos()) {
411 mem_cut(&cs
, jmp_ret
);
414 lab_add(0); /* the return label */
415 body_n
= mem_len(&cs
);
417 /* generating function prologue */
419 os("\x55", 1); /* push rbp */
420 os("\x89\xe5", 2); /* mov rbp, rsp */
424 spsub
= ALIGN(spsub
, 8);
427 i_saveregs(sregs
, sregs_pos
, 1); /* saving registers */
429 mem_put(&cs
, body
, body_n
);
431 /* generating function epilogue */
432 i_saveregs(sregs
, sregs_pos
, 0); /* restoring saved registers */
434 os("\xc9", 1); /* leave */
435 os("\xc3", 1); /* ret */
436 /* adjusting code offsets */
437 for (i
= 0; i
< rel_n
; i
++)
439 for (i
= 0; i
< jmp_n
; i
++)
441 for (i
= 0; i
< lab_sz
; i
++)
445 /* introduce shorter jumps, if possible */
446 static void i_shortjumps(int *nb
)
448 long off
= 0; /* current code offset */
449 long dif
= 0; /* the difference after changing jump instructions */
450 int rel
= 0; /* current relocation */
451 int lab
= 1; /* current label */
452 long c_len
= mem_len(&cs
);
453 char *c
= mem_get(&cs
);
455 for (i
= 0; i
< jmp_n
; i
++)
456 nb
[i
] = abs(lab_loc
[jmp_dst
[i
]] - jmp_off
[i
]) < 0x70 ? 1 : 4;
457 for (i
= 0; i
< jmp_n
; i
++) {
458 long cur
= jmp_off
[i
] - i_jlen(jmp_op
[i
], 4);
459 while (rel
< rel_n
&& rel_off
[rel
] <= cur
)
460 rel_off
[rel
++] += dif
;
461 while (lab
< lab_sz
&& lab_loc
[lab
] <= cur
)
462 lab_loc
[lab
++] += dif
;
463 mem_put(&cs
, c
+ off
, cur
- off
);
464 jmp_off
[i
] = i_jmp(jmp_op
[i
], nb
[i
]);
465 off
= cur
+ i_jlen(jmp_op
[i
], 4) + 4;
466 dif
= mem_len(&cs
) - off
;
469 rel_off
[rel
++] += dif
;
471 lab_loc
[lab
++] += dif
;
473 mem_put(&cs
, c
+ off
, c_len
- off
);
477 void i_code(char **c
, long *c_len
, long **rsym
, long **rflg
, long **roff
, long *rcnt
)
479 int *nb
; /* number of bytes necessary for jump displacements */
481 /* more compact jmp instructions */
482 nb
= malloc(jmp_n
* sizeof(nb
[0]));
483 for (i
= 0; i
< jmp_n
; i
++)
486 for (i
= 0; i
< jmp_n
; i
++) /* filling jmp destinations */
487 oi_at(jmp_off
[i
], lab_loc
[jmp_dst
[i
]] -
488 jmp_off
[i
] - nb
[i
], nb
[i
]);
490 *c_len
= mem_len(&cs
);
512 long i_reg(long op
, long *rd
, long *r1
, long *r2
, long *r3
, long *tmp
)
523 if (oc
& (O_NUM
| O_SYM
))
524 *r1
= oc
& (O_NUM
| O_SYM
) ? LONGSZ
* 8 : R_TMPS
;
526 *r1
= T_SZ(bt
) == 1 ? R_BYTE
: R_TMPS
;
531 *r2
= oc
& O_NUM
? (oc
== O_ADD
? 32 : 8) : R_TMPS
;
547 *rd
= oc
== O_MOD
? (1 << R_RDX
) : (1 << R_RAX
);
549 *r2
= R_TMPS
& ~*rd
& ~*r1
;
551 *r2
&= ~(1 << R_RDX
);
552 *tmp
= (1 << R_RDX
) | (1 << R_RAX
);
558 *r2
= oc
& O_NUM
? 8 : R_TMPS
;
572 *tmp
= (1 << R_RDI
) | (1 << R_RCX
);
579 *tmp
= (1 << R_RDI
) | (1 << R_RSI
) | (1 << R_RCX
);
583 *r1
= (1 << REG_RET
);
587 *rd
= (1 << REG_RET
);
588 *r1
= oc
& O_SYM
? 0 : R_TMPS
;
589 *tmp
= R_TMPS
& ~R_PERM
;
593 *rd
= T_SZ(bt
) == 1 ? R_BYTE
: R_TMPS
;
595 *r2
= oc
& O_NUM
? 0 : R_TMPS
;
599 *r1
= T_SZ(bt
) == 1 ? R_BYTE
: R_TMPS
;
601 *r3
= oc
& O_NUM
? 0 : R_TMPS
;
610 *r2
= oc
& O_NUM
? 8 : R_TMPS
;
/*
 * i_imm(): return nonzero if immediate n fits in a lim-bit signed field.
 * Accepts the symmetric range [-(2^(lim-1) - 1) - 1, 2^(lim-1) - 1],
 * i.e. including the most negative lim-bit value.
 *
 * Fix: shift 1L, not 1 — for lim == 32 the original `1 << 31` left-shifts
 * into the sign bit of a 32-bit int, which is undefined behavior in C.
 */
int i_imm(long lim, long n)
{
	long max = (1L << (lim - 1)) - 1;
	return n <= max && n + 1 >= -max;
}
624 long i_ins(long op
, long rd
, long r1
, long r2
, long r3
)
630 if (rd
== r1
&& r2
<= 127 && r2
>= -128)
631 i_add_imm(op
, r1
, r1
, r2
);
633 i_add_anyimm(rd
, r1
, r2
);
635 i_add(op
, r1
, r1
, r2
);
640 i_shl_imm(op
, r1
, r1
, r2
);
642 i_shl(op
, r1
, r1
, r2
);
646 i_mul(R_RAX
, r1
, r2
);
648 i_div(op
, R_RAX
, r1
, r2
);
650 i_div(op
, R_RDX
, r1
, r2
);
661 if (oc
& O_UOP
) { /* uop */
671 op_rr(I_CALL
, 2, r1
, LONGSZ
);
674 if (oc
== (O_CALL
| O_SYM
)) {
675 os("\xe8", 1); /* call $x */
676 i_rel(r1
, OUT_CS
| OUT_RLREL
, opos());
680 if (oc
== (O_MOV
| O_SYM
)) {
684 if (oc
== (O_MOV
| O_NUM
)) {
689 os("\xfc\xf3\xaa", 3); /* cld; rep stosb */
693 os("\xfc\xf3\xa4", 3); /* cld; rep movs */
698 jmp_add(O_JMP
, i_jmp(op
, 4), 0);
701 if (oc
== (O_LD
| O_NUM
)) {
702 op_rm(movrx_op(bt
, I_MOVR
), rd
, r1
, r2
, movrx_bt(bt
));
705 if (oc
== (O_ST
| O_NUM
)) {
706 op_rm(I_MOV
, r1
, r2
, r3
, bt
);
715 jmp_add(op
, i_jmp(op
, 4), r3
+ 1);