tcg/ppc/tcg-target.c
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

static uint8_t *tb_ret_addr;

#ifdef __APPLE__
#define LINKAGE_AREA_SIZE 24
#define LR_OFFSET 8
#elif defined _AIX
#define LINKAGE_AREA_SIZE 52
#define LR_OFFSET 8
#else
#define LINKAGE_AREA_SIZE 8
#define LR_OFFSET 4
#endif

#define FAST_PATH
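/* ADDEND_OFFSET selects the host-usable low word of the CPUTLBEntry addend
   field: when the target's physical addresses are wider than 32 bits the
   addend field is 64 bits wide, and on this big-endian 32-bit host its useful
   low word sits 4 bytes into the field. */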
#if TARGET_PHYS_ADDR_BITS <= 32
#define ADDEND_OFFSET 0
#else
#define ADDEND_OFFSET 4
#endif

#ifndef NDEBUG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r0",
    "r1",
    "rp",
    "r3",
    "r4",
    "r5",
    "r6",
    "r7",
    "r8",
    "r9",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
};
#endif

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17,
    TCG_REG_R18,
    TCG_REG_R19,
    TCG_REG_R20,
    TCG_REG_R21,
    TCG_REG_R22,
    TCG_REG_R23,
    TCG_REG_R28,
    TCG_REG_R29,
    TCG_REG_R30,
    TCG_REG_R31,
#ifdef __APPLE__
    TCG_REG_R2,
#endif
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
#ifndef __APPLE__
    TCG_REG_R11,
#endif
    TCG_REG_R12,
#ifndef __linux__
    TCG_REG_R13,
#endif
    TCG_REG_R24,
    TCG_REG_R25,
    TCG_REG_R26,
    TCG_REG_R27
};

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10
};

static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_R3,
    TCG_REG_R4
};

static const int tcg_target_callee_save_regs[] = {
#ifdef __APPLE__
    TCG_REG_R11,
    TCG_REG_R13,
#endif
#ifdef _AIX
    TCG_REG_R13,
#endif
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17,
    TCG_REG_R18,
    TCG_REG_R19,
    TCG_REG_R20,
    TCG_REG_R21,
    TCG_REG_R22,
    TCG_REG_R23,
    TCG_REG_R24,
    TCG_REG_R25,
    TCG_REG_R26,
    /* TCG_REG_R27, */ /* currently used for the global env, so no
                          need to save */
    TCG_REG_R28,
    TCG_REG_R29,
    TCG_REG_R30,
    TCG_REG_R31
};

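/* Branch relocation helpers.  B encodes a signed 26-bit (word-aligned)
   PC-relative displacement, BC a signed 16-bit one.  reloc_pc24_val and
   reloc_pc14_val compute the displacement field for a branch at 'pc' targeting
   'target', aborting if it does not fit; reloc_pc24/reloc_pc14 patch that field
   into an already emitted instruction. */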
static uint32_t reloc_pc24_val (void *pc, tcg_target_long target)
{
    tcg_target_long disp;

    disp = target - (tcg_target_long) pc;
    if ((disp << 6) >> 6 != disp)
        tcg_abort ();

    return disp & 0x3fffffc;
}

static void reloc_pc24 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3fffffc)
        | reloc_pc24_val (pc, target);
}

static uint16_t reloc_pc14_val (void *pc, tcg_target_long target)
{
    tcg_target_long disp;

    disp = target - (tcg_target_long) pc;
    if (disp != (int16_t) disp)
        tcg_abort ();

    return disp & 0xfffc;
}

static void reloc_pc14 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xfffc)
        | reloc_pc14_val (pc, target);
}

static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch (type) {
    case R_PPC_REL14:
        reloc_pc14 (code_ptr, value);
        break;
    case R_PPC_REL24:
        reloc_pc24 (code_ptr, value);
        break;
    default:
        tcg_abort();
    }
}

/* maximum number of registers used for input function arguments */
static int tcg_target_get_call_iarg_regs_count(int flags)
{
    return ARRAY_SIZE (tcg_target_call_iarg_regs);
}

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'A': case 'B': case 'C': case 'D':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, 3 + ct_str[0] - 'A');
        break;
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
#ifdef CONFIG_SOFTMMU
    case 'L':                   /* qemu_ld constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
        break;
    case 'K':                   /* qemu_st[8..32] constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
#if TARGET_LONG_BITS == 64
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R6);
#endif
        break;
    case 'M':                   /* qemu_st64 constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R6);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R7);
        break;
#else
    case 'L':
    case 'K':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
    case 'M':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
        break;
#endif
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;

    return 0;
}

/* test if a constant matches the constraint */
static int tcg_target_const_match(tcg_target_long val,
                                  const TCGArgConstraint *arg_ct)
{
    int ct;

    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    return 0;
}

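/* Instruction encoding helpers.  OPCD places the 6-bit primary opcode in the
   top bits of the instruction word; XO31/XO19 additionally place the 10-bit
   extended opcode used by the opcode-31 and opcode-19 instruction forms,
   shifted left by one so that bit 0 remains free for the Rc/LK flag. */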
#define OPCD(opc) ((opc)<<26)
#define XO31(opc) (OPCD(31)|((opc)<<1))
#define XO19(opc) (OPCD(19)|((opc)<<1))

#define B      OPCD(18)
#define BC     OPCD(16)
#define LBZ    OPCD(34)
#define LHZ    OPCD(40)
#define LHA    OPCD(42)
#define LWZ    OPCD(32)
#define STB    OPCD(38)
#define STH    OPCD(44)
#define STW    OPCD(36)

#define ADDI   OPCD(14)
#define ADDIS  OPCD(15)
#define ORI    OPCD(24)
#define ORIS   OPCD(25)
#define XORI   OPCD(26)
#define XORIS  OPCD(27)
#define ANDI   OPCD(28)
#define ANDIS  OPCD(29)
#define MULLI  OPCD( 7)
#define CMPLI  OPCD(10)
#define CMPI   OPCD(11)

#define LWZU   OPCD(33)
#define STWU   OPCD(37)

#define RLWINM OPCD(21)

#define BCLR   XO19( 16)
#define BCCTR  XO19(528)
#define CRAND  XO19(257)
#define CRANDC XO19(129)
#define CRNAND XO19(225)
#define CROR   XO19(449)

#define EXTSB  XO31(954)
#define EXTSH  XO31(922)
#define ADD    XO31(266)
#define ADDE   XO31(138)
#define ADDC   XO31( 10)
#define AND    XO31( 28)
#define SUBF   XO31( 40)
#define SUBFC  XO31(  8)
#define SUBFE  XO31(136)
#define OR     XO31(444)
#define XOR    XO31(316)
#define MULLW  XO31(235)
#define MULHWU XO31( 11)
#define DIVW   XO31(491)
#define DIVWU  XO31(459)
#define CMP    XO31(  0)
#define CMPL   XO31( 32)
#define LHBRX  XO31(790)
#define LWBRX  XO31(534)
#define STHBRX XO31(918)
#define STWBRX XO31(662)
#define MFSPR  XO31(339)
#define MTSPR  XO31(467)
#define SRAWI  XO31(824)
#define NEG    XO31(104)

#define LBZX   XO31( 87)
#define LHZX   XO31(276)
#define LHAX   XO31(343)
#define LWZX   XO31( 23)
#define STBX   XO31(215)
#define STHX   XO31(407)
#define STWX   XO31(151)

#define SPR(a,b) ((((a)<<5)|(b))<<11)
#define LR     SPR(8, 0)
#define CTR    SPR(9, 0)

#define SLW    XO31( 24)
#define SRW    XO31(536)
#define SRAW   XO31(792)

#define TW     XO31(4)
#define TRAP   (TW | TO (31))

#define RT(r) ((r)<<21)
#define RS(r) ((r)<<21)
#define RA(r) ((r)<<16)
#define RB(r) ((r)<<11)
#define TO(t) ((t)<<21)
#define SH(s) ((s)<<11)
#define MB(b) ((b)<<6)
#define ME(e) ((e)<<1)
#define BO(o) ((o)<<21)

#define LK    1

#define TAB(t,a,b) (RT(t) | RA(a) | RB(b))
#define SAB(s,a,b) (RS(s) | RA(a) | RB(b))

#define BF(n)    ((n)<<23)
#define BI(n, c) (((c)+((n)*4))<<16)
#define BT(n, c) (((c)+((n)*4))<<21)
#define BA(n, c) (((c)+((n)*4))<<16)
#define BB(n, c) (((c)+((n)*4))<<11)

#define BO_COND_TRUE  BO (12)
#define BO_COND_FALSE BO (4)
#define BO_ALWAYS     BO (20)

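/* CR_LT..CR_SO are the four condition bits within a CR field.  tcg_to_bc maps
   each TCG condition to the BC (conditional branch) instruction that tests the
   corresponding bit of CR field 7, which is where tcg_out_cmp deposits its
   comparison result. */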
enum {
    CR_LT,
    CR_GT,
    CR_EQ,
    CR_SO
};

static const uint32_t tcg_to_bc[10] = {
    [TCG_COND_EQ]  = BC | BI (7, CR_EQ) | BO_COND_TRUE,
    [TCG_COND_NE]  = BC | BI (7, CR_EQ) | BO_COND_FALSE,
    [TCG_COND_LT]  = BC | BI (7, CR_LT) | BO_COND_TRUE,
    [TCG_COND_GE]  = BC | BI (7, CR_LT) | BO_COND_FALSE,
    [TCG_COND_LE]  = BC | BI (7, CR_GT) | BO_COND_FALSE,
    [TCG_COND_GT]  = BC | BI (7, CR_GT) | BO_COND_TRUE,
    [TCG_COND_LTU] = BC | BI (7, CR_LT) | BO_COND_TRUE,
    [TCG_COND_GEU] = BC | BI (7, CR_LT) | BO_COND_FALSE,
    [TCG_COND_LEU] = BC | BI (7, CR_GT) | BO_COND_FALSE,
    [TCG_COND_GTU] = BC | BI (7, CR_GT) | BO_COND_TRUE,
};

static void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out32 (s, OR | SAB (arg, ret, arg));
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         int ret, tcg_target_long arg)
{
    if (arg == (int16_t) arg)
        tcg_out32 (s, ADDI | RT (ret) | RA (0) | (arg & 0xffff));
    else {
        tcg_out32 (s, ADDIS | RT (ret) | RA (0) | ((arg >> 16) & 0xffff));
        if (arg & 0xffff)
            tcg_out32 (s, ORI | RS (ret) | RA (ret) | (arg & 0xffff));
    }
}

static void tcg_out_ldst (TCGContext *s, int ret, int addr,
                          int offset, int op1, int op2)
{
    if (offset == (int16_t) offset)
        tcg_out32 (s, op1 | RT (ret) | RA (addr) | (offset & 0xffff));
    else {
        tcg_out_movi (s, TCG_TYPE_I32, 0, offset);
        tcg_out32 (s, op2 | RT (ret) | RA (addr) | RB (0));
    }
}

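/* Emit a branch to 'target': a single B instruction when the displacement fits
   in 26 signed bits, otherwise load the address into r0 and branch through the
   CTR register.  'mask' is 0 for a plain branch or LK to make it a call. */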
static void tcg_out_b (TCGContext *s, int mask, tcg_target_long target)
{
    tcg_target_long disp;

    disp = target - (tcg_target_long) s->code_ptr;
    if ((disp << 6) >> 6 == disp)
        tcg_out32 (s, B | (disp & 0x3fffffc) | mask);
    else {
        tcg_out_movi (s, TCG_TYPE_I32, 0, (tcg_target_long) target);
        tcg_out32 (s, MTSPR | RS (0) | CTR);
        tcg_out32 (s, BCCTR | BO_ALWAYS | mask);
    }
}

#ifdef _AIX
static void tcg_out_call (TCGContext *s, tcg_target_long arg, int const_arg)
{
    int reg;

    if (const_arg) {
        reg = 2;
        tcg_out_movi (s, TCG_TYPE_I32, reg, arg);
    }
    else reg = arg;

    tcg_out32 (s, LWZ | RT (0) | RA (reg));
    tcg_out32 (s, MTSPR | RA (0) | CTR);
    tcg_out32 (s, LWZ | RT (2) | RA (reg) | 4);
    tcg_out32 (s, BCCTR | BO_ALWAYS | LK);
}
#endif

#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif

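/* Load from guest memory.  With CONFIG_SOFTMMU the inline fast path computes
   the TLB entry address from the guest address: the first RLWINM extracts the
   TLB index and scales it to a byte offset, ADD adds the env base held in
   TCG_AREG0, and LWZU fetches addr_read while leaving the entry address in r0.
   The second RLWINM clears the in-page bits of the address (keeping the low
   alignment bits, so unaligned accesses also miss), and the compare decides:
   on a hit a conditional branch skips the helper-call slow path and the access
   is done directly through the entry's addend. */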
static void tcg_out_qemu_ld (TCGContext *s, const TCGArg *args, int opc)
{
    int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
#ifdef CONFIG_SOFTMMU
    int r2;
    void *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3)
        data_reg2 = *args++;
    else
        data_reg2 = 0;
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;
    s_bits = opc & 3;

#ifdef CONFIG_SOFTMMU
    r0 = 3;
    r1 = 4;
    r2 = 0;

    tcg_out32 (s, (RLWINM
                   | RA (r0)
                   | RS (addr_reg)
                   | SH (32 - (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS))
                   | MB (32 - (CPU_TLB_BITS + CPU_TLB_ENTRY_BITS))
                   | ME (31 - CPU_TLB_ENTRY_BITS)
                   )
        );
    tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (TCG_AREG0));
    tcg_out32 (s, (LWZU
                   | RT (r1)
                   | RA (r0)
                   | offsetof (CPUState, tlb_table[mem_index][0].addr_read)
                   )
        );
    tcg_out32 (s, (RLWINM
                   | RA (r2)
                   | RS (addr_reg)
                   | SH (0)
                   | MB ((32 - s_bits) & 31)
                   | ME (31 - TARGET_PAGE_BITS)
                   )
        );

    tcg_out32 (s, CMP | BF (7) | RA (r2) | RB (r1));
#if TARGET_LONG_BITS == 64
    tcg_out32 (s, LWZ | RT (r1) | RA (r0) | 4);
    tcg_out32 (s, CMP | BF (6) | RA (addr_reg2) | RB (r1));
    tcg_out32 (s, CRAND | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
#endif

    label1_ptr = s->code_ptr;
#ifdef FAST_PATH
    tcg_out32 (s, BC | BI (7, CR_EQ) | BO_COND_TRUE);
#endif

    /* slow path */
#if TARGET_LONG_BITS == 32
    tcg_out_mov (s, 3, addr_reg);
    tcg_out_movi (s, TCG_TYPE_I32, 4, mem_index);
#else
    tcg_out_mov (s, 3, addr_reg2);
    tcg_out_mov (s, 4, addr_reg);
    tcg_out_movi (s, TCG_TYPE_I32, 5, mem_index);
#endif

#ifdef _AIX
    tcg_out_call (s, (tcg_target_long) qemu_ld_helpers[s_bits], 1);
#else
    tcg_out_b (s, LK, (tcg_target_long) qemu_ld_helpers[s_bits]);
#endif
    switch (opc) {
    case 0|4:
        tcg_out32 (s, EXTSB | RA (data_reg) | RS (3));
        break;
    case 1|4:
        tcg_out32 (s, EXTSH | RA (data_reg) | RS (3));
        break;
    case 0:
    case 1:
    case 2:
        if (data_reg != 3)
            tcg_out_mov (s, data_reg, 3);
        break;
    case 3:
        if (data_reg == 3) {
            if (data_reg2 == 4) {
                tcg_out_mov (s, 0, 4);
                tcg_out_mov (s, 4, 3);
                tcg_out_mov (s, 3, 0);
            }
            else {
                tcg_out_mov (s, data_reg2, 3);
                tcg_out_mov (s, 3, 4);
            }
        }
        else {
            if (data_reg != 4) tcg_out_mov (s, data_reg, 4);
            if (data_reg2 != 3) tcg_out_mov (s, data_reg2, 3);
        }
        break;
    }
    label2_ptr = s->code_ptr;
    tcg_out32 (s, B);

    /* label1: fast path */
#ifdef FAST_PATH
    reloc_pc14 (label1_ptr, (tcg_target_long) s->code_ptr);
#endif

    /* r0 now contains &env->tlb_table[mem_index][index].addr_read */
    tcg_out32 (s, (LWZ
                   | RT (r0)
                   | RA (r0)
                   | (ADDEND_OFFSET + offsetof (CPUTLBEntry, addend)
                      - offsetof (CPUTLBEntry, addr_read))
                   ));
    /* r0 = env->tlb_table[mem_index][index].addend */
    tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (addr_reg));
    /* r0 = env->tlb_table[mem_index][index].addend + addr */

#else  /* !CONFIG_SOFTMMU */
    r0 = addr_reg;
    r1 = 3;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 0;
#else
    bswap = 1;
#endif
    switch (opc) {
    default:
    case 0:
        tcg_out32 (s, LBZ | RT (data_reg) | RA (r0));
        break;
    case 0|4:
        tcg_out32 (s, LBZ | RT (data_reg) | RA (r0));
        tcg_out32 (s, EXTSB | RA (data_reg) | RS (data_reg));
        break;
    case 1:
        if (bswap) tcg_out32 (s, LHBRX | RT (data_reg) | RB (r0));
        else tcg_out32 (s, LHZ | RT (data_reg) | RA (r0));
        break;
    case 1|4:
        if (bswap) {
            tcg_out32 (s, LHBRX | RT (data_reg) | RB (r0));
            tcg_out32 (s, EXTSH | RA (data_reg) | RS (data_reg));
        }
        else tcg_out32 (s, LHA | RT (data_reg) | RA (r0));
        break;
    case 2:
        if (bswap) tcg_out32 (s, LWBRX | RT (data_reg) | RB (r0));
        else tcg_out32 (s, LWZ | RT (data_reg) | RA (r0));
        break;
    case 3:
        if (bswap) {
            tcg_out32 (s, ADDI | RT (r1) | RA (r0) | 4);
            tcg_out32 (s, LWBRX | RT (data_reg) | RB (r0));
            tcg_out32 (s, LWBRX | RT (data_reg2) | RB (r1));
        }
        else {
            if (r0 == data_reg2) {
                tcg_out32 (s, LWZ | RT (0) | RA (r0));
                tcg_out32 (s, LWZ | RT (data_reg) | RA (r0) | 4);
                tcg_out_mov (s, data_reg2, 0);
            }
            else {
                tcg_out32 (s, LWZ | RT (data_reg2) | RA (r0));
                tcg_out32 (s, LWZ | RT (data_reg) | RA (r0) | 4);
            }
        }
        break;
    }

#ifdef CONFIG_SOFTMMU
    reloc_pc24 (label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}

static void tcg_out_qemu_st (TCGContext *s, const TCGArg *args, int opc)
{
    int addr_reg, r0, r1, data_reg, data_reg2, mem_index, bswap;
#ifdef CONFIG_SOFTMMU
    int r2, ir;
    void *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3)
        data_reg2 = *args++;
    else
        data_reg2 = 0;
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;

#ifdef CONFIG_SOFTMMU
    r0 = 3;
    r1 = 4;
    r2 = 0;

    tcg_out32 (s, (RLWINM
                   | RA (r0)
                   | RS (addr_reg)
                   | SH (32 - (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS))
                   | MB (32 - (CPU_TLB_ENTRY_BITS + CPU_TLB_BITS))
                   | ME (31 - CPU_TLB_ENTRY_BITS)
                   )
        );
    tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (TCG_AREG0));
    tcg_out32 (s, (LWZU
                   | RT (r1)
                   | RA (r0)
                   | offsetof (CPUState, tlb_table[mem_index][0].addr_write)
                   )
        );
    tcg_out32 (s, (RLWINM
                   | RA (r2)
                   | RS (addr_reg)
                   | SH (0)
                   | MB ((32 - opc) & 31)
                   | ME (31 - TARGET_PAGE_BITS)
                   )
        );

    tcg_out32 (s, CMP | BF (7) | RA (r2) | RB (r1));
#if TARGET_LONG_BITS == 64
    tcg_out32 (s, LWZ | RT (r1) | RA (r0) | 4);
    tcg_out32 (s, CMP | BF (6) | RA (addr_reg2) | RB (r1));
    tcg_out32 (s, CRAND | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
#endif

    label1_ptr = s->code_ptr;
#ifdef FAST_PATH
    tcg_out32 (s, BC | BI (7, CR_EQ) | BO_COND_TRUE);
#endif

    /* slow path */
#if TARGET_LONG_BITS == 32
    tcg_out_mov (s, 3, addr_reg);
    ir = 4;
#else
    tcg_out_mov (s, 3, addr_reg2);
    tcg_out_mov (s, 4, addr_reg);
#ifdef TCG_TARGET_CALL_ALIGN_ARGS
    ir = 5;
#else
    ir = 4;
#endif
#endif

    switch (opc) {
    case 0:
        tcg_out32 (s, (RLWINM
                       | RA (ir)
                       | RS (data_reg)
                       | SH (0)
                       | MB (24)
                       | ME (31)));
        break;
    case 1:
        tcg_out32 (s, (RLWINM
                       | RA (ir)
                       | RS (data_reg)
                       | SH (0)
                       | MB (16)
                       | ME (31)));
        break;
    case 2:
        tcg_out_mov (s, ir, data_reg);
        break;
    case 3:
#ifdef TCG_TARGET_CALL_ALIGN_ARGS
        ir = 5;
#endif
        tcg_out_mov (s, ir++, data_reg2);
        tcg_out_mov (s, ir, data_reg);
        break;
    }
    ir++;

    tcg_out_movi (s, TCG_TYPE_I32, ir, mem_index);
#ifdef _AIX
    tcg_out_call (s, (tcg_target_long) qemu_st_helpers[opc], 1);
#else
    tcg_out_b (s, LK, (tcg_target_long) qemu_st_helpers[opc]);
#endif
    label2_ptr = s->code_ptr;
    tcg_out32 (s, B);

    /* label1: fast path */
#ifdef FAST_PATH
    reloc_pc14 (label1_ptr, (tcg_target_long) s->code_ptr);
#endif

    tcg_out32 (s, (LWZ
                   | RT (r0)
                   | RA (r0)
                   | (ADDEND_OFFSET + offsetof (CPUTLBEntry, addend)
                      - offsetof (CPUTLBEntry, addr_write))
                   ));
    /* r0 = env->tlb_table[mem_index][index].addend */
    tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (addr_reg));
    /* r0 = env->tlb_table[mem_index][index].addend + addr */

#else  /* !CONFIG_SOFTMMU */
    r1 = 3;
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 0;
#else
    bswap = 1;
#endif
    switch (opc) {
    case 0:
        tcg_out32 (s, STB | RS (data_reg) | RA (r0));
        break;
    case 1:
        if (bswap) tcg_out32 (s, STHBRX | RS (data_reg) | RA (0) | RB (r0));
        else tcg_out32 (s, STH | RS (data_reg) | RA (r0));
        break;
    case 2:
        if (bswap) tcg_out32 (s, STWBRX | RS (data_reg) | RA (0) | RB (r0));
        else tcg_out32 (s, STW | RS (data_reg) | RA (r0));
        break;
    case 3:
        if (bswap) {
            tcg_out32 (s, ADDI | RT (r1) | RA (r0) | 4);
            tcg_out32 (s, STWBRX | RS (data_reg) | RA (0) | RB (r0));
            tcg_out32 (s, STWBRX | RS (data_reg2) | RA (0) | RB (r1));
        }
        else {
            tcg_out32 (s, STW | RS (data_reg2) | RA (r0));
            tcg_out32 (s, STW | RS (data_reg) | RA (r0) | 4);
        }
        break;
    }

#ifdef CONFIG_SOFTMMU
    reloc_pc24 (label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}

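/* Prologue/epilogue.  The prologue saves LR and the callee-saved registers,
   allocates a 16-byte aligned stack frame and jumps through CTR to the
   translated-code entry point passed in r3.  tb_ret_addr marks the start of
   the epilogue, which exit_tb branches back to so that the registers are
   restored before returning.  On AIX an ad-hoc function descriptor is emitted
   in front of the code. */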
void tcg_target_qemu_prologue (TCGContext *s)
{
    int i, frame_size;

    frame_size = 0
        + LINKAGE_AREA_SIZE
        + TCG_STATIC_CALL_ARGS_SIZE
        + ARRAY_SIZE (tcg_target_callee_save_regs) * 4
        ;
    frame_size = (frame_size + 15) & ~15;

#ifdef _AIX
    {
        uint32_t addr;

        /* First emit adhoc function descriptor */
        addr = (uint32_t) s->code_ptr + 12;
        tcg_out32 (s, addr);        /* entry point */
        s->code_ptr += 8;           /* skip TOC and environment pointer */
    }
#endif
    tcg_out32 (s, MFSPR | RT (0) | LR);
    tcg_out32 (s, STWU | RS (1) | RA (1) | (-frame_size & 0xffff));
    for (i = 0; i < ARRAY_SIZE (tcg_target_callee_save_regs); ++i)
        tcg_out32 (s, (STW
                       | RS (tcg_target_callee_save_regs[i])
                       | RA (1)
                       | (i * 4 + LINKAGE_AREA_SIZE + TCG_STATIC_CALL_ARGS_SIZE)
                       )
            );
    tcg_out32 (s, STW | RS (0) | RA (1) | (frame_size + LR_OFFSET));

    tcg_out32 (s, MTSPR | RS (3) | CTR);
    tcg_out32 (s, BCCTR | BO_ALWAYS);
    tb_ret_addr = s->code_ptr;

    for (i = 0; i < ARRAY_SIZE (tcg_target_callee_save_regs); ++i)
        tcg_out32 (s, (LWZ
                       | RT (tcg_target_callee_save_regs[i])
                       | RA (1)
                       | (i * 4 + LINKAGE_AREA_SIZE + TCG_STATIC_CALL_ARGS_SIZE)
                       )
            );
    tcg_out32 (s, LWZ | RT (0) | RA (1) | (frame_size + LR_OFFSET));
    tcg_out32 (s, MTSPR | RS (0) | LR);
    tcg_out32 (s, ADDI | RT (1) | RA (1) | frame_size);
    tcg_out32 (s, BCLR | BO_ALWAYS);
}

static void tcg_out_ld (TCGContext *s, TCGType type, int ret, int arg1,
                        tcg_target_long arg2)
{
    tcg_out_ldst (s, ret, arg1, arg2, LWZ, LWZX);
}

static void tcg_out_st (TCGContext *s, TCGType type, int arg, int arg1,
                        tcg_target_long arg2)
{
    tcg_out_ldst (s, arg, arg1, arg2, STW, STWX);
}

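/* ppc_addi materialises rt = ra + si.  A 16-bit immediate takes one ADDI; for
   a full 32-bit immediate it uses ADDIS followed by ADDI, and since the final
   ADDI sign-extends its low 16 bits, the high half is pre-incremented by one
   whenever bit 15 of si is set (the "((uint16_t) si >> 15)" term). */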
static void ppc_addi (TCGContext *s, int rt, int ra, tcg_target_long si)
{
    if (!si && rt == ra)
        return;

    if (si == (int16_t) si)
        tcg_out32 (s, ADDI | RT (rt) | RA (ra) | (si & 0xffff));
    else {
        uint16_t h = ((si >> 16) & 0xffff) + ((uint16_t) si >> 15);
        tcg_out32 (s, ADDIS | RT (rt) | RA (ra) | h);
        tcg_out32 (s, ADDI | RT (rt) | RA (rt) | (si & 0xffff));
    }
}

static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    ppc_addi (s, reg, reg, val);
}

static void tcg_out_cmp (TCGContext *s, int cond, TCGArg arg1, TCGArg arg2,
                         int const_arg2, int cr)
{
    int imm;
    uint32_t op;

    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        if (const_arg2) {
            if ((int16_t) arg2 == arg2) {
                op = CMPI;
                imm = 1;
                break;
            }
            else if ((uint16_t) arg2 == arg2) {
                op = CMPLI;
                imm = 1;
                break;
            }
        }
        op = CMPL;
        imm = 0;
        break;

    case TCG_COND_LT:
    case TCG_COND_GE:
    case TCG_COND_LE:
    case TCG_COND_GT:
        if (const_arg2) {
            if ((int16_t) arg2 == arg2) {
                op = CMPI;
                imm = 1;
                break;
            }
        }
        op = CMP;
        imm = 0;
        break;

    case TCG_COND_LTU:
    case TCG_COND_GEU:
    case TCG_COND_LEU:
    case TCG_COND_GTU:
        if (const_arg2) {
            if ((uint16_t) arg2 == arg2) {
                op = CMPLI;
                imm = 1;
                break;
            }
        }
        op = CMPL;
        imm = 0;
        break;

    default:
        tcg_abort ();
    }
    op |= BF (cr);

    if (imm)
        tcg_out32 (s, op | RA (arg1) | (arg2 & 0xffff));
    else {
        if (const_arg2) {
            tcg_out_movi (s, TCG_TYPE_I32, 0, arg2);
            tcg_out32 (s, op | RA (arg1) | RB (0));
        }
        else
            tcg_out32 (s, op | RA (arg1) | RB (arg2));
    }
}

static void tcg_out_bc (TCGContext *s, int bc, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value)
        tcg_out32 (s, bc | reloc_pc14_val (s->code_ptr, l->u.value));
    else {
        uint16_t val = *(uint16_t *) &s->code_ptr[2];

        /* Thanks to Andrzej Zaborowski */
        tcg_out32 (s, bc | (val & 0xfffc));
        tcg_out_reloc (s, s->code_ptr - 4, R_PPC_REL14, label_index, 0);
    }
}

static void tcg_out_brcond (TCGContext *s, int cond,
                            TCGArg arg1, TCGArg arg2, int const_arg2,
                            int label_index)
{
    tcg_out_cmp (s, cond, arg1, arg2, const_arg2, 7);
    tcg_out_bc (s, tcg_to_bc[cond], label_index);
}

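/* Double-word conditional branch: the 64-bit comparison is decomposed into
   32-bit compares placed in separate CR fields, which are then combined with
   CR logic operations (CRAND/CRANDC/CRNAND/CROR) into CR7[EQ] and branched on
   with a single BC. */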
/* XXX: we implement it at the target level to avoid having to
   handle cross basic blocks temporaries */
static void tcg_out_brcond2 (TCGContext *s, const TCGArg *args,
                             const int *const_args)
{
    int cond = args[4], label_index = args[5], op;
    struct { int bit1; int bit2; int cond2; } bits[] = {
        [TCG_COND_LT ] = { CR_LT, CR_LT, TCG_COND_LT  },
        [TCG_COND_LE ] = { CR_LT, CR_GT, TCG_COND_LT  },
        [TCG_COND_GT ] = { CR_GT, CR_GT, TCG_COND_GT  },
        [TCG_COND_GE ] = { CR_GT, CR_LT, TCG_COND_GT  },
        [TCG_COND_LTU] = { CR_LT, CR_LT, TCG_COND_LTU },
        [TCG_COND_LEU] = { CR_LT, CR_GT, TCG_COND_LTU },
        [TCG_COND_GTU] = { CR_GT, CR_GT, TCG_COND_GTU },
        [TCG_COND_GEU] = { CR_GT, CR_LT, TCG_COND_GTU },
    }, *b = &bits[cond];

    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        op = (cond == TCG_COND_EQ) ? CRAND : CRNAND;
        tcg_out_cmp (s, cond, args[0], args[2], const_args[2], 6);
        tcg_out_cmp (s, cond, args[1], args[3], const_args[3], 7);
        tcg_out32 (s, op | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
        break;
    case TCG_COND_LT:
    case TCG_COND_LE:
    case TCG_COND_GT:
    case TCG_COND_GE:
    case TCG_COND_LTU:
    case TCG_COND_LEU:
    case TCG_COND_GTU:
    case TCG_COND_GEU:
        op = (b->bit1 != b->bit2) ? CRANDC : CRAND;
        tcg_out_cmp (s, b->cond2, args[1], args[3], const_args[3], 5);
        tcg_out_cmp (s, TCG_COND_EQ, args[1], args[3], const_args[3], 6);
        tcg_out_cmp (s, cond, args[0], args[2], const_args[2], 7);
        tcg_out32 (s, op | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, b->bit2));
        tcg_out32 (s, CROR | BT (7, CR_EQ) | BA (5, b->bit1) | BB (7, CR_EQ));
        break;
    default:
        tcg_abort();
    }

    tcg_out_bc (s, (BC | BI (7, CR_EQ) | BO_COND_TRUE), label_index);
}

void ppc_tb_set_jmp_target (unsigned long jmp_addr, unsigned long addr)
{
    uint32_t *ptr;
    long disp = addr - jmp_addr;
    unsigned long patch_size;

    ptr = (uint32_t *)jmp_addr;

    if ((disp << 6) >> 6 != disp) {
        ptr[0] = 0x3c000000 | (addr >> 16);    /* lis 0,addr@h */
        ptr[1] = 0x60000000 | (addr & 0xffff); /* ori 0,0,addr@l */
        ptr[2] = 0x7c0903a6;                   /* mtctr 0 */
        ptr[3] = 0x4e800420;                   /* bctr */
        patch_size = 16;
    } else {
        /* patch the branch destination */
        if (disp != 16) {
            *ptr = 0x48000000 | (disp & 0x03fffffc); /* b disp */
            patch_size = 4;
        } else {
            ptr[0] = 0x60000000; /* nop */
            ptr[1] = 0x60000000;
            ptr[2] = 0x60000000;
            ptr[3] = 0x60000000;
            patch_size = 16;
        }
    }
    /* flush icache */
    flush_icache_range(jmp_addr, jmp_addr + patch_size);
}

static void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                       const int *const_args)
{
    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi (s, TCG_TYPE_I32, TCG_REG_R3, args[0]);
        tcg_out_b (s, 0, (tcg_target_long) tb_ret_addr);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */

            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            s->code_ptr += 16;
        }
        else {
            tcg_abort ();
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_br:
        {
            TCGLabel *l = &s->labels[args[0]];

            if (l->has_value) {
                tcg_out_b (s, 0, l->u.value);
            }
            else {
                uint32_t val = *(uint32_t *) s->code_ptr;

                /* Thanks to Andrzej Zaborowski */
                tcg_out32 (s, B | (val & 0x3fffffc));
                tcg_out_reloc (s, s->code_ptr - 4, R_PPC_REL24, args[0], 0);
            }
        }
        break;
    case INDEX_op_call:
#ifdef _AIX
        tcg_out_call (s, args[0], const_args[0]);
#else
        if (const_args[0]) {
            tcg_out_b (s, LK, args[0]);
        }
        else {
            tcg_out32 (s, MTSPR | RS (args[0]) | LR);
            tcg_out32 (s, BCLR | BO_ALWAYS | LK);
        }
#endif
        break;
    case INDEX_op_jmp:
        if (const_args[0]) {
            tcg_out_b (s, 0, args[0]);
        }
        else {
            tcg_out32 (s, MTSPR | RS (args[0]) | CTR);
            tcg_out32 (s, BCCTR | BO_ALWAYS);
        }
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
        break;
    case INDEX_op_ld8u_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX);
        break;
    case INDEX_op_ld8s_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX);
        tcg_out32 (s, EXTSB | RS (args[0]) | RA (args[0]));
        break;
    case INDEX_op_ld16u_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LHZ, LHZX);
        break;
    case INDEX_op_ld16s_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LHA, LHAX);
        break;
    case INDEX_op_ld_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LWZ, LWZX);
        break;
    case INDEX_op_st8_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], STB, STBX);
        break;
    case INDEX_op_st16_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], STH, STHX);
        break;
    case INDEX_op_st_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], STW, STWX);
        break;

    case INDEX_op_add_i32:
        if (const_args[2])
            ppc_addi (s, args[0], args[1], args[2]);
        else
            tcg_out32 (s, ADD | TAB (args[0], args[1], args[2]));
        break;
    case INDEX_op_sub_i32:
        if (const_args[2])
            ppc_addi (s, args[0], args[1], -args[2]);
        else
            tcg_out32 (s, SUBF | TAB (args[0], args[2], args[1]));
        break;

    case INDEX_op_and_i32:
        if (const_args[2]) {
            if ((args[2] & 0xffff) == args[2])
                tcg_out32 (s, ANDI | RS (args[1]) | RA (args[0]) | args[2]);
            else if ((args[2] & 0xffff0000) == args[2])
                tcg_out32 (s, ANDIS | RS (args[1]) | RA (args[0])
                           | ((args[2] >> 16) & 0xffff));
            else {
                tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
                tcg_out32 (s, AND | SAB (args[1], args[0], 0));
            }
        }
        else
            tcg_out32 (s, AND | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_or_i32:
        if (const_args[2]) {
            if (args[2] & 0xffff) {
                tcg_out32 (s, ORI | RS (args[1]) | RA (args[0])
                           | (args[2] & 0xffff));
                if (args[2] >> 16)
                    tcg_out32 (s, ORIS | RS (args[0]) | RA (args[0])
                               | ((args[2] >> 16) & 0xffff));
            }
            else {
                tcg_out32 (s, ORIS | RS (args[1]) | RA (args[0])
                           | ((args[2] >> 16) & 0xffff));
            }
        }
        else
            tcg_out32 (s, OR | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_xor_i32:
        if (const_args[2]) {
            if ((args[2] & 0xffff) == args[2])
                tcg_out32 (s, XORI | RS (args[1]) | RA (args[0])
                           | (args[2] & 0xffff));
            else if ((args[2] & 0xffff0000) == args[2])
                tcg_out32 (s, XORIS | RS (args[1]) | RA (args[0])
                           | ((args[2] >> 16) & 0xffff));
            else {
                tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
                tcg_out32 (s, XOR | SAB (args[1], args[0], 0));
            }
        }
        else
            tcg_out32 (s, XOR | SAB (args[1], args[0], args[2]));
        break;

    case INDEX_op_mul_i32:
        if (const_args[2]) {
            if (args[2] == (int16_t) args[2])
                tcg_out32 (s, MULLI | RT (args[0]) | RA (args[1])
                           | (args[2] & 0xffff));
            else {
                tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
                tcg_out32 (s, MULLW | TAB (args[0], args[1], 0));
            }
        }
        else
            tcg_out32 (s, MULLW | TAB (args[0], args[1], args[2]));
        break;

    case INDEX_op_div_i32:
        tcg_out32 (s, DIVW | TAB (args[0], args[1], args[2]));
        break;

    case INDEX_op_divu_i32:
        tcg_out32 (s, DIVWU | TAB (args[0], args[1], args[2]));
        break;

    case INDEX_op_rem_i32:
        tcg_out32 (s, DIVW | TAB (0, args[1], args[2]));
        tcg_out32 (s, MULLW | TAB (0, 0, args[2]));
        tcg_out32 (s, SUBF | TAB (args[0], 0, args[1]));
        break;

    case INDEX_op_remu_i32:
        tcg_out32 (s, DIVWU | TAB (0, args[1], args[2]));
        tcg_out32 (s, MULLW | TAB (0, 0, args[2]));
        tcg_out32 (s, SUBF | TAB (args[0], 0, args[1]));
        break;

    case INDEX_op_mulu2_i32:
        if (args[0] == args[2] || args[0] == args[3]) {
            tcg_out32 (s, MULLW | TAB (0, args[2], args[3]));
            tcg_out32 (s, MULHWU | TAB (args[1], args[2], args[3]));
            tcg_out_mov (s, args[0], 0);
        }
        else {
            tcg_out32 (s, MULLW | TAB (args[0], args[2], args[3]));
            tcg_out32 (s, MULHWU | TAB (args[1], args[2], args[3]));
        }
        break;

    case INDEX_op_shl_i32:
        if (const_args[2]) {
            tcg_out32 (s, (RLWINM
                           | RA (args[0])
                           | RS (args[1])
                           | SH (args[2])
                           | MB (0)
                           | ME (31 - args[2])
                           )
                );
        }
        else
            tcg_out32 (s, SLW | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_shr_i32:
        if (const_args[2]) {
            tcg_out32 (s, (RLWINM
                           | RA (args[0])
                           | RS (args[1])
                           | SH (32 - args[2])
                           | MB (args[2])
                           | ME (31)
                           )
                );
        }
        else
            tcg_out32 (s, SRW | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_sar_i32:
        if (const_args[2])
            tcg_out32 (s, SRAWI | RS (args[1]) | RA (args[0]) | SH (args[2]));
        else
            tcg_out32 (s, SRAW | SAB (args[1], args[0], args[2]));
        break;

    case INDEX_op_add2_i32:
        if (args[0] == args[3] || args[0] == args[5]) {
            tcg_out32 (s, ADDC | TAB (0, args[2], args[4]));
            tcg_out32 (s, ADDE | TAB (args[1], args[3], args[5]));
            tcg_out_mov (s, args[0], 0);
        }
        else {
            tcg_out32 (s, ADDC | TAB (args[0], args[2], args[4]));
            tcg_out32 (s, ADDE | TAB (args[1], args[3], args[5]));
        }
        break;
    case INDEX_op_sub2_i32:
        if (args[0] == args[3] || args[0] == args[5]) {
            tcg_out32 (s, SUBFC | TAB (0, args[4], args[2]));
            tcg_out32 (s, SUBFE | TAB (args[1], args[5], args[3]));
            tcg_out_mov (s, args[0], 0);
        }
        else {
            tcg_out32 (s, SUBFC | TAB (args[0], args[4], args[2]));
            tcg_out32 (s, SUBFE | TAB (args[1], args[5], args[3]));
        }
        break;

    case INDEX_op_brcond_i32:
        /*
          args[0] = r0
          args[1] = r1
          args[2] = cond
          args[3] = r1 is const
          args[4] = label_index
        */
        tcg_out_brcond (s, args[2], args[0], args[1], const_args[1], args[3]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args, const_args);
        break;

    case INDEX_op_neg_i32:
        tcg_out32 (s, NEG | RT (args[0]) | RA (args[1]));
        break;

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    case INDEX_op_ext8s_i32:
        tcg_out32 (s, EXTSB | RS (args[1]) | RA (args[0]));
        break;
    case INDEX_op_ext16s_i32:
        tcg_out32 (s, EXTSH | RS (args[1]) | RA (args[0]));
        break;

    default:
        tcg_dump_ops (s, stderr);
        tcg_abort ();
    }
}

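/* Operand constraints for each op.  "r" is any register and "ri" a register or
   immediate; "L", "K" and "M" are the qemu_ld/qemu_st constraints parsed in
   target_parse_constraint above, which on softmmu builds keep the helper-call
   argument registers (r3..r7, depending on the access size) free. */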
static const TCGTargetOpDef ppc_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "r", "ri" } },
    { INDEX_op_mul_i32, { "r", "r", "ri" } },
    { INDEX_op_div_i32, { "r", "r", "r" } },
    { INDEX_op_divu_i32, { "r", "r", "r" } },
    { INDEX_op_rem_i32, { "r", "r", "r" } },
    { INDEX_op_remu_i32, { "r", "r", "r" } },
    { INDEX_op_mulu2_i32, { "r", "r", "r", "r" } },
    { INDEX_op_sub_i32, { "r", "r", "ri" } },
    { INDEX_op_and_i32, { "r", "r", "ri" } },
    { INDEX_op_or_i32, { "r", "r", "ri" } },
    { INDEX_op_xor_i32, { "r", "r", "ri" } },

    { INDEX_op_shl_i32, { "r", "r", "ri" } },
    { INDEX_op_shr_i32, { "r", "r", "ri" } },
    { INDEX_op_sar_i32, { "r", "r", "ri" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_add2_i32, { "r", "r", "r", "r", "r", "r" } },
    { INDEX_op_sub2_i32, { "r", "r", "r", "r", "r", "r" } },
    { INDEX_op_brcond2_i32, { "r", "r", "r", "r" } },

    { INDEX_op_neg_i32, { "r", "r" } },

#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L" } },

    { INDEX_op_qemu_st8, { "K", "K" } },
    { INDEX_op_qemu_st16, { "K", "K" } },
    { INDEX_op_qemu_st32, { "K", "K" } },
    { INDEX_op_qemu_st64, { "M", "M", "M" } },
#else
    { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L", "L", "L" } },

    { INDEX_op_qemu_st8, { "K", "K", "K" } },
    { INDEX_op_qemu_st16, { "K", "K", "K" } },
    { INDEX_op_qemu_st32, { "K", "K", "K" } },
    { INDEX_op_qemu_st64, { "M", "M", "M", "M" } },
#endif

    { INDEX_op_ext8s_i32, { "r", "r" } },
    { INDEX_op_ext16s_i32, { "r", "r" } },

    { -1 },
};

void tcg_target_init(TCGContext *s)
{
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_R0) |
#ifdef __APPLE__
                     (1 << TCG_REG_R2) |
#endif
                     (1 << TCG_REG_R3) |
                     (1 << TCG_REG_R4) |
                     (1 << TCG_REG_R5) |
                     (1 << TCG_REG_R6) |
                     (1 << TCG_REG_R7) |
                     (1 << TCG_REG_R8) |
                     (1 << TCG_REG_R9) |
                     (1 << TCG_REG_R10) |
                     (1 << TCG_REG_R11) |
                     (1 << TCG_REG_R12)
        );

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);
#ifndef __APPLE__
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2);
#endif
#ifdef __linux__
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13);
#endif

    tcg_add_target_add_op_defs(ppc_op_defs);
}