tcg-i386: Tidy move operations.
tcg/i386/tcg-target.c  [qemu/kevin.git]
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#ifndef NDEBUG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%eax",
    "%ecx",
    "%edx",
    "%ebx",
    "%esp",
    "%ebp",
    "%esi",
    "%edi",
};
#endif

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_EBX,
    TCG_REG_ESI,
    TCG_REG_EDI,
    TCG_REG_EBP,
    TCG_REG_ECX,
    TCG_REG_EDX,
    TCG_REG_EAX,
};

static const int tcg_target_call_iarg_regs[3] = { TCG_REG_EAX, TCG_REG_EDX, TCG_REG_ECX };
static const int tcg_target_call_oarg_regs[2] = { TCG_REG_EAX, TCG_REG_EDX };

static uint8_t *tb_ret_addr;

static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch(type) {
    case R_386_32:
        *(uint32_t *)code_ptr = value;
        break;
    case R_386_PC32:
        *(uint32_t *)code_ptr = value - (long)code_ptr;
        break;
    case R_386_PC8:
        value -= (long)code_ptr;
        if (value != (int8_t)value) {
            tcg_abort();
        }
        *(uint8_t *)code_ptr = value;
        break;
    default:
        tcg_abort();
    }
}

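/* A quick check of the arithmetic above, with made-up addresses: for an
   R_386_PC32 reloc at code_ptr == 0x1000 resolving to a target of 0x1234
   (value + addend), the patched field becomes 0x1234 - 0x1000 = 0x234.
   Branches register their reloc with addend -4 (see tcg_out_jxx below)
   because the CPU computes the destination relative to the end of the
   4-byte displacement field; R_386_PC8 is the same scheme with a 1-byte
   field, hence the (int8_t) range check. */
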
/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    flags &= TCG_CALL_TYPE_MASK;
    switch(flags) {
    case TCG_CALL_TYPE_STD:
        return 0;
    case TCG_CALL_TYPE_REGPARM_1:
    case TCG_CALL_TYPE_REGPARM_2:
    case TCG_CALL_TYPE_REGPARM:
        return flags - TCG_CALL_TYPE_REGPARM_1 + 1;
    default:
        tcg_abort();
    }
}

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'a':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_EAX);
        break;
    case 'b':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_EBX);
        break;
    case 'c':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_ECX);
        break;
    case 'd':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_EDX);
        break;
    case 'S':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_ESI);
        break;
    case 'D':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_EDI);
        break;
    case 'q':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xf);
        break;
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xff);
        break;

        /* qemu_ld/st address constraint */
    case 'L':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_EAX);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_EDX);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

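/* Illustration of how the letters above are used by the operand
   descriptions near the bottom of this file: "q" limits allocation to the
   four byte-addressable registers (%eax, %ecx, %edx, %ebx -> mask 0xf),
   "r" allows any of the eight GPRs (mask 0xff), and "L" is "r" minus
   %eax and %edx, which the qemu_ld/qemu_st code clobbers while setting up
   its softmmu helper call. */
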
/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;
    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else
        return 0;
}

#define P_EXT          0x100     /* 0x0f opcode prefix */

#define OPC_BSWAP      (0xc8 | P_EXT)
#define OPC_MOVB_EvGv  (0x88)    /* stores, more or less */
#define OPC_MOVL_EvGv  (0x89)    /* stores, more or less */
#define OPC_MOVL_GvEv  (0x8b)    /* loads, more or less */
#define OPC_SHIFT_1    (0xd1)
#define OPC_SHIFT_Ib   (0xc1)
#define OPC_SHIFT_cl   (0xd3)

#define ARITH_ADD 0
#define ARITH_OR  1
#define ARITH_ADC 2
#define ARITH_SBB 3
#define ARITH_AND 4
#define ARITH_SUB 5
#define ARITH_XOR 6
#define ARITH_CMP 7

#define SHIFT_ROL 0
#define SHIFT_ROR 1
#define SHIFT_SHL 4
#define SHIFT_SHR 5
#define SHIFT_SAR 7

#define JCC_JMP (-1)
#define JCC_JO  0x0
#define JCC_JNO 0x1
#define JCC_JB  0x2
#define JCC_JAE 0x3
#define JCC_JE  0x4
#define JCC_JNE 0x5
#define JCC_JBE 0x6
#define JCC_JA  0x7
#define JCC_JS  0x8
#define JCC_JNS 0x9
#define JCC_JP  0xa
#define JCC_JNP 0xb
#define JCC_JL  0xc
#define JCC_JGE 0xd
#define JCC_JLE 0xe
#define JCC_JG  0xf

static const uint8_t tcg_cond_to_jcc[10] = {
    [TCG_COND_EQ] = JCC_JE,
    [TCG_COND_NE] = JCC_JNE,
    [TCG_COND_LT] = JCC_JL,
    [TCG_COND_GE] = JCC_JGE,
    [TCG_COND_LE] = JCC_JLE,
    [TCG_COND_GT] = JCC_JG,
    [TCG_COND_LTU] = JCC_JB,
    [TCG_COND_GEU] = JCC_JAE,
    [TCG_COND_LEU] = JCC_JBE,
    [TCG_COND_GTU] = JCC_JA,
};

static inline void tcg_out_opc(TCGContext *s, int opc)
{
    if (opc & P_EXT)
        tcg_out8(s, 0x0f);
    tcg_out8(s, opc);
}

static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
{
    tcg_out_opc(s, opc);
    tcg_out8(s, 0xc0 | (r << 3) | rm);
}

/* rm == -1 means no register index */
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
                                        int32_t offset)
{
    tcg_out_opc(s, opc);
    if (rm == -1) {
        tcg_out8(s, 0x05 | (r << 3));
        tcg_out32(s, offset);
    } else if (offset == 0 && rm != TCG_REG_EBP) {
        if (rm == TCG_REG_ESP) {
            tcg_out8(s, 0x04 | (r << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x00 | (r << 3) | rm);
        }
    } else if ((int8_t)offset == offset) {
        if (rm == TCG_REG_ESP) {
            tcg_out8(s, 0x44 | (r << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x40 | (r << 3) | rm);
        }
        tcg_out8(s, offset);
    } else {
        if (rm == TCG_REG_ESP) {
            tcg_out8(s, 0x84 | (r << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x80 | (r << 3) | rm);
        }
        tcg_out32(s, offset);
    }
}

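/* Encoding notes for the two helpers above (standard IA-32 ModRM rules):
   the ModRM byte is mod[7:6] reg[5:3] rm[2:0].  tcg_out_modrm() always
   emits the register-direct form (mod = 11), e.g. "movl %esi, %edi" is
   0x89 0xf7, i.e. 0x89 then 0xc0 | (ESI << 3) | EDI.  tcg_out_modrm_offset()
   picks the shortest memory form: mod = 00 with no displacement when the
   offset is zero (except for %ebp, whose mod = 00 encoding means "disp32,
   no base"), mod = 01 with a disp8, or mod = 10 with a disp32.  %esp as a
   base always needs the extra 0x24 SIB byte, because rm = 100 selects SIB
   addressing. */
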
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    if (arg != ret) {
        tcg_out_modrm(s, OPC_MOVL_GvEv, ret, arg);
    }
}

static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, int32_t arg)
{
    if (arg == 0) {
        /* xor r0,r0 */
        tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret);
    } else {
        tcg_out8(s, 0xb8 + ret);
        tcg_out32(s, arg);
    }
}

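/* Size comparison for the special case above: with arg == 0 the helper
   emits the 2-byte "xorl %reg, %reg" (opcode 0x31 = 0x01 | (ARITH_XOR << 3))
   rather than the 5-byte move of an immediate zero; any other constant uses
   0xb8 + reg followed by the 32-bit immediate, so "movl $1, %eax" comes out
   as b8 01 00 00 00. */
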
static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    tcg_out_modrm_offset(s, OPC_MOVL_GvEv, ret, arg1, arg2);
}

static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
                              int arg1, tcg_target_long arg2)
{
    tcg_out_modrm_offset(s, OPC_MOVL_EvGv, arg, arg1, arg2);
}

static void tcg_out_shifti(TCGContext *s, int subopc, int reg, int count)
{
    if (count == 1) {
        tcg_out_modrm(s, OPC_SHIFT_1, subopc, reg);
    } else {
        tcg_out_modrm(s, OPC_SHIFT_Ib, subopc, reg);
        tcg_out8(s, count);
    }
}

static inline void tcg_out_bswap32(TCGContext *s, int reg)
{
    tcg_out_opc(s, OPC_BSWAP + reg);
}

static inline void tcg_out_rolw_8(TCGContext *s, int reg)
{
    tcg_out8(s, 0x66);
    tcg_out_shifti(s, SHIFT_ROL, reg, 8);
}

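/* Illustration: a 16-bit byte swap is just a rotate of the low word by 8,
   so tcg_out_rolw_8() emits the 0x66 operand-size prefix followed by
   "rolw $8, %reg" (66 c1 c0+reg 08 in the immediate form).  A value of
   0x1234 in the low half becomes 0x3412; the upper 16 bits are untouched. */
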
static inline void tgen_arithi(TCGContext *s, int c, int r0, int32_t val, int cf)
{
    if (!cf && ((c == ARITH_ADD && val == 1) || (c == ARITH_SUB && val == -1))) {
        /* inc */
        tcg_out_opc(s, 0x40 + r0);
    } else if (!cf && ((c == ARITH_ADD && val == -1) || (c == ARITH_SUB && val == 1))) {
        /* dec */
        tcg_out_opc(s, 0x48 + r0);
    } else if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu && r0 < 4) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, r0, r0);
    } else {
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    }
}

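/* Rough byte counts for the cases above: "incl %eax" is one byte
   (0x40 + reg), the sign-extended imm8 form "addl $8, %eax" is three bytes
   (83 c0 08), and the full imm32 form "addl $0x12345, %eax" is six bytes
   (81 c0 45 23 01 00).  The inc/dec shortcut is only taken when cf == 0,
   because inc and dec leave the carry flag untouched and the add2/sub2
   cases in tcg_out_op rely on CF being set by the low-part operation. */
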
static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val != 0)
        tgen_arithi(s, ARITH_ADD, reg, val, 0);
}

/* Use SMALL != 0 to force a short forward branch.  */
static void tcg_out_jxx(TCGContext *s, int opc, int label_index, int small)
{
    int32_t val, val1;
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
        val1 = val - 2;
        if ((int8_t)val1 == val1) {
            if (opc == -1) {
                tcg_out8(s, 0xeb);
            } else {
                tcg_out8(s, 0x70 + opc);
            }
            tcg_out8(s, val1);
        } else {
            if (small) {
                tcg_abort();
            }
            if (opc == -1) {
                tcg_out8(s, 0xe9);
                tcg_out32(s, val - 5);
            } else {
                tcg_out8(s, 0x0f);
                tcg_out8(s, 0x80 + opc);
                tcg_out32(s, val - 6);
            }
        }
    } else if (small) {
        if (opc == -1) {
            tcg_out8(s, 0xeb);
        } else {
            tcg_out8(s, 0x70 + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC8, label_index, -1);
        s->code_ptr += 1;
    } else {
        if (opc == -1) {
            tcg_out8(s, 0xe9);
        } else {
            tcg_out8(s, 0x0f);
            tcg_out8(s, 0x80 + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
        s->code_ptr += 4;
    }
}

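/* Branch sizing above, by example: when the label is already resolved and
   the displacement fits in a signed byte, the 2-byte short form is used
   (eb disp8, or 70+cc disp8 for a conditional), hence "val - 2"; otherwise
   the 5-byte "e9 disp32" or 6-byte "0f 8x disp32" near form is emitted,
   hence "val - 5" and "val - 6".  Forward references leave a hole of the
   right size and record an R_386_PC8 or R_386_PC32 reloc for patch_reloc()
   to fill once the label gets a value. */
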
static void tcg_out_cmp(TCGContext *s, TCGArg arg1, TCGArg arg2,
                        int const_arg2)
{
    if (const_arg2) {
        if (arg2 == 0) {
            /* test r, r */
            tcg_out_modrm(s, 0x85, arg1, arg1);
        } else {
            tgen_arithi(s, ARITH_CMP, arg1, arg2, 0);
        }
    } else {
        tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3), arg2, arg1);
    }
}

static void tcg_out_brcond(TCGContext *s, TCGCond cond,
                           TCGArg arg1, TCGArg arg2, int const_arg2,
                           int label_index, int small)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2);
    tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index, small);
}

/* XXX: we implement it at the target level to avoid having to
   handle temporaries that live across basic blocks */
static void tcg_out_brcond2(TCGContext *s, const TCGArg *args,
                            const int *const_args, int small)
{
    int label_next;
    label_next = gen_new_label();
    switch(args[4]) {
    case TCG_COND_EQ:
        tcg_out_brcond(s, TCG_COND_NE, args[0], args[2], const_args[2],
                       label_next, 1);
        tcg_out_brcond(s, TCG_COND_EQ, args[1], args[3], const_args[3],
                       args[5], small);
        break;
    case TCG_COND_NE:
        tcg_out_brcond(s, TCG_COND_NE, args[0], args[2], const_args[2],
                       args[5], small);
        tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3],
                       args[5], small);
        break;
    case TCG_COND_LT:
        tcg_out_brcond(s, TCG_COND_LT, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_LTU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_LE:
        tcg_out_brcond(s, TCG_COND_LT, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_LEU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_GT:
        tcg_out_brcond(s, TCG_COND_GT, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_GTU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_GE:
        tcg_out_brcond(s, TCG_COND_GT, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_GEU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_LTU:
        tcg_out_brcond(s, TCG_COND_LTU, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_LTU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_LEU:
        tcg_out_brcond(s, TCG_COND_LTU, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_LEU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_GTU:
        tcg_out_brcond(s, TCG_COND_GTU, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_GTU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_GEU:
        tcg_out_brcond(s, TCG_COND_GTU, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_GEU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    default:
        tcg_abort();
    }
    tcg_out_label(s, label_next, (tcg_target_long)s->code_ptr);
}

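/* How the double-word branch decomposes, taking LTU as an example: to test
   (ah:al) < (bh:bl) the code first branches to the target when ah < bh
   (unsigned), then uses the same flags to skip ahead to label_next when
   ah != bh, and only when the high words are equal compares al < bl
   (unsigned) for the final branch.  Signed conditions differ only in the
   high-word comparison; the low words are always compared unsigned. */
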
static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGArg dest,
                            TCGArg arg1, TCGArg arg2, int const_arg2)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2);
    /* setcc */
    tcg_out_modrm(s, 0x90 | tcg_cond_to_jcc[cond] | P_EXT, 0, dest);
    tgen_arithi(s, ARITH_AND, dest, 0xff, 0);
}

static void tcg_out_setcond2(TCGContext *s, const TCGArg *args,
                             const int *const_args)
{
    TCGArg new_args[6];
    int label_true, label_over;

    memcpy(new_args, args+1, 5*sizeof(TCGArg));

    if (args[0] == args[1] || args[0] == args[2]
        || (!const_args[3] && args[0] == args[3])
        || (!const_args[4] && args[0] == args[4])) {
        /* When the destination overlaps with one of the argument
           registers, don't do anything tricky.  */
        label_true = gen_new_label();
        label_over = gen_new_label();

        new_args[5] = label_true;
        tcg_out_brcond2(s, new_args, const_args+1, 1);

        tcg_out_movi(s, TCG_TYPE_I32, args[0], 0);
        tcg_out_jxx(s, JCC_JMP, label_over, 1);
        tcg_out_label(s, label_true, (tcg_target_long)s->code_ptr);

        tcg_out_movi(s, TCG_TYPE_I32, args[0], 1);
        tcg_out_label(s, label_over, (tcg_target_long)s->code_ptr);
    } else {
        /* When the destination does not overlap one of the arguments,
           clear the destination first, jump if cond false, and emit an
           increment in the true case.  This results in smaller code.  */
        tcg_out_movi(s, TCG_TYPE_I32, args[0], 0);

        label_over = gen_new_label();
        new_args[4] = tcg_invert_cond(new_args[4]);
        new_args[5] = label_over;
        tcg_out_brcond2(s, new_args, const_args+1, 1);

        tgen_arithi(s, ARITH_ADD, args[0], 1, 0);
        tcg_out_label(s, label_over, (tcg_target_long)s->code_ptr);
    }
}

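/* Sketch of the second (non-overlapping destination) shape produced above,
   for a hypothetical destination D:
       xorl D, D              ; movi 0, emitted before the compares
       <brcond2 on !cond>     ; branches to "over" when cond is false
       incl D                 ; one-byte increment in the true path
     over:
   When D overlaps an input register the first shape is used instead:
   branch to a "true" label and write 0 or 1 in separate arms, so that no
   input is clobbered before it has been compared. */
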
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif

#ifndef CONFIG_USER_ONLY
#define GUEST_BASE 0
#endif

/* XXX: qemu_ld and qemu_st could be modified to clobber only EDX and
   EAX. It will be useful once fixed-register globals are less
   common. */
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
#if defined(CONFIG_SOFTMMU)
    uint8_t *label3_ptr;
#endif
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3)
        data_reg2 = *args++;
    else
        data_reg2 = 0;
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;
    s_bits = opc & 3;

    r0 = TCG_REG_EAX;
    r1 = TCG_REG_EDX;

#if defined(CONFIG_SOFTMMU)
    tcg_out_mov(s, r1, addr_reg);

    tcg_out_mov(s, r0, addr_reg);

    tcg_out_shifti(s, SHIFT_SHR, r1, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    tcg_out_opc(s, 0x8d); /* lea offset(r1, %ebp), r1 */
    tcg_out8(s, 0x80 | (r1 << 3) | 0x04);
    tcg_out8(s, (5 << 3) | r1);
    tcg_out32(s, offsetof(CPUState, tlb_table[mem_index][0].addr_read));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b, r0, r1, 0);

    tcg_out_mov(s, r0, addr_reg);

#if TARGET_LONG_BITS == 32
    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;
#else
    /* jne label3 */
    tcg_out8(s, 0x70 + JCC_JNE);
    label3_ptr = s->code_ptr;
    s->code_ptr++;

    /* cmp 4(r1), addr_reg2 */
    tcg_out_modrm_offset(s, 0x3b, addr_reg2, r1, 4);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* label3: */
    *label3_ptr = s->code_ptr - label3_ptr - 1;
#endif

    /* XXX: move that code to the end of the TB */
#if TARGET_LONG_BITS == 32
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_EDX, mem_index);
#else
    tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_ECX, mem_index);
#endif
    tcg_out8(s, 0xe8);
    tcg_out32(s, (tcg_target_long)qemu_ld_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);

    switch(opc) {
    case 0 | 4:
        /* movsbl */
        tcg_out_modrm(s, 0xbe | P_EXT, data_reg, TCG_REG_EAX);
        break;
    case 1 | 4:
        /* movswl */
        tcg_out_modrm(s, 0xbf | P_EXT, data_reg, TCG_REG_EAX);
        break;
    case 0:
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT, data_reg, TCG_REG_EAX);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, data_reg, TCG_REG_EAX);
        break;
    case 2:
    default:
        tcg_out_mov(s, data_reg, TCG_REG_EAX);
        break;
    case 3:
        if (data_reg == TCG_REG_EDX) {
            tcg_out_opc(s, 0x90 + TCG_REG_EDX); /* xchg %edx, %eax */
            tcg_out_mov(s, data_reg2, TCG_REG_EAX);
        } else {
            tcg_out_mov(s, data_reg, TCG_REG_EAX);
            tcg_out_mov(s, data_reg2, TCG_REG_EDX);
        }
        break;
    }

    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_read));
#else
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, GUEST_BASE);
        break;
    case 0 | 4:
        /* movsbl */
        tcg_out_modrm_offset(s, 0xbe | P_EXT, data_reg, r0, GUEST_BASE);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, GUEST_BASE);
        if (bswap) {
            tcg_out_rolw_8(s, data_reg);
        }
        break;
    case 1 | 4:
        /* movswl */
        tcg_out_modrm_offset(s, 0xbf | P_EXT, data_reg, r0, GUEST_BASE);
        if (bswap) {
            tcg_out_rolw_8(s, data_reg);

            /* movswl data_reg, data_reg */
            tcg_out_modrm(s, 0xbf | P_EXT, data_reg, data_reg);
        }
        break;
    case 2:
        tcg_out_ld(s, TCG_TYPE_I32, data_reg, r0, GUEST_BASE);
        if (bswap) {
            tcg_out_bswap32(s, data_reg);
        }
        break;
    case 3:
        /* XXX: could be nicer */
        if (r0 == data_reg) {
            r1 = TCG_REG_EDX;
            if (r1 == data_reg)
                r1 = TCG_REG_EAX;
            tcg_out_mov(s, r1, r0);
            r0 = r1;
        }
        if (!bswap) {
            tcg_out_ld(s, TCG_TYPE_I32, data_reg, r0, GUEST_BASE);
            tcg_out_ld(s, TCG_TYPE_I32, data_reg2, r0, GUEST_BASE + 4);
        } else {
            tcg_out_ld(s, TCG_TYPE_I32, data_reg, r0, GUEST_BASE + 4);
            tcg_out_bswap32(s, data_reg);

            tcg_out_ld(s, TCG_TYPE_I32, data_reg2, r0, GUEST_BASE);
            tcg_out_bswap32(s, data_reg2);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}

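/* The softmmu fast path above in concrete numbers (both constants depend on
   the target configuration): with TARGET_PAGE_BITS == 12 and
   CPU_TLB_ENTRY_BITS == 4, "shr $8, r1" plus the (CPU_TLB_SIZE - 1) << 4
   mask turns the guest address into a byte offset into
   env->tlb_table[mem_index], and the lea adds that to %ebp (which holds
   env) plus the table offset.  r0 keeps the address masked with
   TARGET_PAGE_MASK | (size - 1), so the cmp against addr_read fails both on
   a tag mismatch and on a size-misaligned access (which could cross a
   page); failures fall through to the __ldX_mmu helper call, while hits
   jump to label1 and add the cached "addend" to form the host address used
   by the direct loads. */
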
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
#if defined(CONFIG_SOFTMMU)
    uint8_t *label3_ptr;
#endif
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3)
        data_reg2 = *args++;
    else
        data_reg2 = 0;
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;

    s_bits = opc;

    r0 = TCG_REG_EAX;
    r1 = TCG_REG_EDX;

#if defined(CONFIG_SOFTMMU)
    tcg_out_mov(s, r1, addr_reg);

    tcg_out_mov(s, r0, addr_reg);

    tcg_out_shifti(s, SHIFT_SHR, r1, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    tcg_out_opc(s, 0x8d); /* lea offset(r1, %ebp), r1 */
    tcg_out8(s, 0x80 | (r1 << 3) | 0x04);
    tcg_out8(s, (5 << 3) | r1);
    tcg_out32(s, offsetof(CPUState, tlb_table[mem_index][0].addr_write));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b, r0, r1, 0);

    tcg_out_mov(s, r0, addr_reg);

#if TARGET_LONG_BITS == 32
    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;
#else
    /* jne label3 */
    tcg_out8(s, 0x70 + JCC_JNE);
    label3_ptr = s->code_ptr;
    s->code_ptr++;

    /* cmp 4(r1), addr_reg2 */
    tcg_out_modrm_offset(s, 0x3b, addr_reg2, r1, 4);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* label3: */
    *label3_ptr = s->code_ptr - label3_ptr - 1;
#endif

    /* XXX: move that code to the end of the TB */
#if TARGET_LONG_BITS == 32
    if (opc == 3) {
        tcg_out_mov(s, TCG_REG_EDX, data_reg);
        tcg_out_mov(s, TCG_REG_ECX, data_reg2);
        tcg_out8(s, 0x6a); /* push Ib */
        tcg_out8(s, mem_index);
        tcg_out8(s, 0xe8);
        tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
                  (tcg_target_long)s->code_ptr - 4);
        tcg_out_addi(s, TCG_REG_ESP, 4);
    } else {
        switch(opc) {
        case 0:
            /* movzbl */
            tcg_out_modrm(s, 0xb6 | P_EXT, TCG_REG_EDX, data_reg);
            break;
        case 1:
            /* movzwl */
            tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_EDX, data_reg);
            break;
        case 2:
            tcg_out_mov(s, TCG_REG_EDX, data_reg);
            break;
        }
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_ECX, mem_index);
        tcg_out8(s, 0xe8);
        tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
                  (tcg_target_long)s->code_ptr - 4);
    }
#else
    if (opc == 3) {
        tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
        tcg_out8(s, 0x6a); /* push Ib */
        tcg_out8(s, mem_index);
        tcg_out_opc(s, 0x50 + data_reg2); /* push */
        tcg_out_opc(s, 0x50 + data_reg); /* push */
        tcg_out8(s, 0xe8);
        tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
                  (tcg_target_long)s->code_ptr - 4);
        tcg_out_addi(s, TCG_REG_ESP, 12);
    } else {
        tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
        switch(opc) {
        case 0:
            /* movzbl */
            tcg_out_modrm(s, 0xb6 | P_EXT, TCG_REG_ECX, data_reg);
            break;
        case 1:
            /* movzwl */
            tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_ECX, data_reg);
            break;
        case 2:
            tcg_out_mov(s, TCG_REG_ECX, data_reg);
            break;
        }
        tcg_out8(s, 0x6a); /* push Ib */
        tcg_out8(s, mem_index);
        tcg_out8(s, 0xe8);
        tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
                  (tcg_target_long)s->code_ptr - 4);
        tcg_out_addi(s, TCG_REG_ESP, 4);
    }
#endif

    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_write));
#else
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    switch(opc) {
    case 0:
        tcg_out_modrm_offset(s, OPC_MOVB_EvGv, data_reg, r0, GUEST_BASE);
        break;
    case 1:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            tcg_out_rolw_8(s, r1);
            data_reg = r1;
        }
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, OPC_MOVL_EvGv, data_reg, r0, GUEST_BASE);
        break;
    case 2:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            tcg_out_bswap32(s, r1);
            data_reg = r1;
        }
        tcg_out_st(s, TCG_TYPE_I32, data_reg, r0, GUEST_BASE);
        break;
    case 3:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg2);
            tcg_out_bswap32(s, r1);
            tcg_out_st(s, TCG_TYPE_I32, r1, r0, GUEST_BASE);
            tcg_out_mov(s, r1, data_reg);
            tcg_out_bswap32(s, r1);
            tcg_out_st(s, TCG_TYPE_I32, r1, r0, GUEST_BASE + 4);
        } else {
            tcg_out_st(s, TCG_TYPE_I32, data_reg, r0, GUEST_BASE);
            tcg_out_st(s, TCG_TYPE_I32, data_reg2, r0, GUEST_BASE + 4);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}

static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
                              const TCGArg *args, const int *const_args)
{
    int c;

    switch(opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_EAX, args[0]);
        tcg_out8(s, 0xe9); /* jmp tb_ret_addr */
        tcg_out32(s, tb_ret_addr - s->code_ptr - 4);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_out8(s, 0xe9); /* jmp im */
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* indirect jump method */
            /* jmp Ev */
            tcg_out_modrm_offset(s, 0xff, 4, -1,
                                 (tcg_target_long)(s->tb_next + args[0]));
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        if (const_args[0]) {
            tcg_out8(s, 0xe8);
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            tcg_out_modrm(s, 0xff, 2, args[0]);
        }
        break;
    case INDEX_op_jmp:
        if (const_args[0]) {
            tcg_out8(s, 0xe9);
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            tcg_out_modrm(s, 0xff, 4, args[0]);
        }
        break;
    case INDEX_op_br:
        tcg_out_jxx(s, JCC_JMP, args[0], 0);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
        break;
    case INDEX_op_ld8u_i32:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        /* movsbl */
        tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        /* movswl */
        tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
        tcg_out_ld(s, TCG_TYPE_I32, args[0], args[1], args[2]);
        break;
    case INDEX_op_st8_i32:
        /* movb */
        tcg_out_modrm_offset(s, OPC_MOVB_EvGv, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, OPC_MOVL_EvGv, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
        tcg_out_st(s, TCG_TYPE_I32, args[0], args[1], args[2]);
        break;
    case INDEX_op_sub_i32:
        c = ARITH_SUB;
        goto gen_arith;
    case INDEX_op_and_i32:
        c = ARITH_AND;
        goto gen_arith;
    case INDEX_op_or_i32:
        c = ARITH_OR;
        goto gen_arith;
    case INDEX_op_xor_i32:
        c = ARITH_XOR;
        goto gen_arith;
    case INDEX_op_add_i32:
        c = ARITH_ADD;
    gen_arith:
        if (const_args[2]) {
            tgen_arithi(s, c, args[0], args[2], 0);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
        }
        break;
    case INDEX_op_mul_i32:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                tcg_out_modrm(s, 0x6b, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                tcg_out_modrm(s, 0x69, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
        }
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_modrm(s, 0xf7, 4, args[3]);
        break;
    case INDEX_op_div2_i32:
        tcg_out_modrm(s, 0xf7, 7, args[4]);
        break;
    case INDEX_op_divu2_i32:
        tcg_out_modrm(s, 0xf7, 6, args[4]);
        break;
    case INDEX_op_shl_i32:
        c = SHIFT_SHL;
    gen_shift32:
        if (const_args[2]) {
            tcg_out_shifti(s, c, args[0], args[2]);
        } else {
            tcg_out_modrm(s, OPC_SHIFT_cl, c, args[0]);
        }
        break;
    case INDEX_op_shr_i32:
        c = SHIFT_SHR;
        goto gen_shift32;
    case INDEX_op_sar_i32:
        c = SHIFT_SAR;
        goto gen_shift32;
    case INDEX_op_rotl_i32:
        c = SHIFT_ROL;
        goto gen_shift32;
    case INDEX_op_rotr_i32:
        c = SHIFT_ROR;
        goto gen_shift32;

    case INDEX_op_add2_i32:
        if (const_args[4])
            tgen_arithi(s, ARITH_ADD, args[0], args[4], 1);
        else
            tcg_out_modrm(s, 0x01 | (ARITH_ADD << 3), args[4], args[0]);
        if (const_args[5])
            tgen_arithi(s, ARITH_ADC, args[1], args[5], 1);
        else
            tcg_out_modrm(s, 0x01 | (ARITH_ADC << 3), args[5], args[1]);
        break;
    case INDEX_op_sub2_i32:
        if (const_args[4])
            tgen_arithi(s, ARITH_SUB, args[0], args[4], 1);
        else
            tcg_out_modrm(s, 0x01 | (ARITH_SUB << 3), args[4], args[0]);
        if (const_args[5])
            tgen_arithi(s, ARITH_SBB, args[1], args[5], 1);
        else
            tcg_out_modrm(s, 0x01 | (ARITH_SBB << 3), args[5], args[1]);
        break;
    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       args[3], 0);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args, const_args, 0);
        break;

    case INDEX_op_bswap16_i32:
        tcg_out_rolw_8(s, args[0]);
        break;
    case INDEX_op_bswap32_i32:
        tcg_out_bswap32(s, args[0]);
        break;

    case INDEX_op_neg_i32:
        tcg_out_modrm(s, 0xf7, 3, args[0]);
        break;

    case INDEX_op_not_i32:
        tcg_out_modrm(s, 0xf7, 2, args[0]);
        break;

    case INDEX_op_ext8s_i32:
        tcg_out_modrm(s, 0xbe | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
        tcg_out_modrm(s, 0xbf | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext8u_i32:
        tcg_out_modrm(s, 0xb6 | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext16u_i32:
        tcg_out_modrm(s, 0xb7 | P_EXT, args[0], args[1]);
        break;

    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args, const_args);
        break;

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;

    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        tcg_abort();
    }
}

static const TCGTargetOpDef x86_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },
    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "q", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "0", "ri" } },
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
    { INDEX_op_mulu2_i32, { "a", "d", "a", "r" } },
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_and_i32, { "r", "0", "ri" } },
    { INDEX_op_or_i32, { "r", "0", "ri" } },
    { INDEX_op_xor_i32, { "r", "0", "ri" } },

    { INDEX_op_shl_i32, { "r", "0", "ci" } },
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
    { INDEX_op_sar_i32, { "r", "0", "ci" } },
    { INDEX_op_rotl_i32, { "r", "0", "ci" } },
    { INDEX_op_rotr_i32, { "r", "0", "ci" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_add2_i32, { "r", "r", "0", "1", "ri", "ri" } },
    { INDEX_op_sub2_i32, { "r", "r", "0", "1", "ri", "ri" } },
    { INDEX_op_brcond2_i32, { "r", "r", "ri", "ri" } },

    { INDEX_op_bswap16_i32, { "r", "0" } },
    { INDEX_op_bswap32_i32, { "r", "0" } },

    { INDEX_op_neg_i32, { "r", "0" } },

    { INDEX_op_not_i32, { "r", "0" } },

    { INDEX_op_ext8s_i32, { "r", "q" } },
    { INDEX_op_ext16s_i32, { "r", "r" } },
    { INDEX_op_ext8u_i32, { "r", "q" } },
    { INDEX_op_ext16u_i32, { "r", "r" } },

    { INDEX_op_setcond_i32, { "q", "r", "ri" } },
    { INDEX_op_setcond2_i32, { "r", "r", "r", "ri", "ri" } },

#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L" } },

    { INDEX_op_qemu_st8, { "cb", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },
#else
    { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L", "L" } },

    { INDEX_op_qemu_st8, { "cb", "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L", "L" } },
#endif
    { -1 },
};

static int tcg_target_callee_save_regs[] = {
    /*  TCG_REG_EBP, */ /* currently used for the global env, so no
                           need to save */
    TCG_REG_EBX,
    TCG_REG_ESI,
    TCG_REG_EDI,
};

static inline void tcg_out_push(TCGContext *s, int reg)
{
    tcg_out_opc(s, 0x50 + reg);
}

static inline void tcg_out_pop(TCGContext *s, int reg)
{
    tcg_out_opc(s, 0x58 + reg);
}

/* Generate global QEMU prologue and epilogue code */
void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size, push_size, stack_addend;

    /* TB prologue */
    /* save all callee saved registers */
    for(i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_push(s, tcg_target_callee_save_regs[i]);
    }
    /* reserve some stack space */
    push_size = 4 + ARRAY_SIZE(tcg_target_callee_save_regs) * 4;
    frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
        ~(TCG_TARGET_STACK_ALIGN - 1);
    stack_addend = frame_size - push_size;
    tcg_out_addi(s, TCG_REG_ESP, -stack_addend);

    tcg_out_modrm(s, 0xff, 4, TCG_REG_EAX); /* jmp *%eax */

    /* TB epilogue */
    tb_ret_addr = s->code_ptr;
    tcg_out_addi(s, TCG_REG_ESP, stack_addend);
    for(i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
        tcg_out_pop(s, tcg_target_callee_save_regs[i]);
    }
    tcg_out8(s, 0xc3); /* ret */
}

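/* Frame arithmetic above, with illustrative values (the real ones depend on
   TCG_STATIC_CALL_ARGS_SIZE and TCG_TARGET_STACK_ALIGN): after pushing three
   callee-saved registers, push_size = 4 + 3*4 = 16 bytes including the
   return address.  With a 128-byte static call-args area and 16-byte
   alignment, frame_size rounds to 144, so stack_addend = 128 and the
   prologue/epilogue pair is "addl $-128, %esp" / "addl $128, %esp".  The
   "jmp *%eax" enters the translation block whose address the caller passed
   in %eax (the first REGPARM argument), and exit_tb returns here via
   tb_ret_addr. */
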
void tcg_target_init(TCGContext *s)
{
#if !defined(CONFIG_USER_ONLY)
    /* fail safe */
    if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
        tcg_abort();
#endif

    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xff);

    tcg_regset_clear(tcg_target_call_clobber_regs);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_EAX);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_EDX);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_ECX);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_ESP);

    tcg_add_target_add_op_defs(x86_op_defs);
}