Add missing r24..r26 to callee save registers
[qemu/mini2440.git] / tcg / ppc / tcg-target.c
blob23f94a83c65e076b32821389dbaf278904814fef
1 /*
2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008 Fabrice Bellard
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
/* Address inside the generated prologue that exit_tb branches back to;
   set in tcg_target_qemu_prologue. */
25 static uint8_t *tb_ret_addr;
/* Stack-frame linkage-area size and link-register save offset differ
   per ABI (Darwin / AIX / SysV). */
27 #ifdef __APPLE__
28 #define LINKAGE_AREA_SIZE 24
29 #define LR_OFFSET 8
30 #elif defined _AIX
31 #define LINKAGE_AREA_SIZE 52
32 #define LR_OFFSET 8
33 #else
34 #define LINKAGE_AREA_SIZE 8
35 #define LR_OFFSET 4
36 #endif
/* When defined, qemu_ld/st emit an inline TLB-hit fast path guarded by
   a conditional branch (see tcg_out_qemu_ld/st). */
38 #define FAST_PATH
/* Adjustment added to offsetof(CPUTLBEntry, addend) when physical
   addresses are wider than 32 bits. */
39 #if TARGET_PHYS_ADDR_BITS <= 32
40 #define ADDEND_OFFSET 0
41 #else
42 #define ADDEND_OFFSET 4
43 #endif
45 #ifndef NDEBUG
46 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
47 "r0",
48 "r1",
49 "rp",
50 "r3",
51 "r4",
52 "r5",
53 "r6",
54 "r7",
55 "r8",
56 "r9",
57 "r10",
58 "r11",
59 "r12",
60 "r13",
61 "r14",
62 "r15",
63 "r16",
64 "r17",
65 "r18",
66 "r19",
67 "r20",
68 "r21",
69 "r22",
70 "r23",
71 "r24",
72 "r25",
73 "r26",
74 "r27",
75 "r28",
76 "r29",
77 "r30",
78 "r31"
80 #endif
82 static const int tcg_target_reg_alloc_order[] = {
83 TCG_REG_R14,
84 TCG_REG_R15,
85 TCG_REG_R16,
86 TCG_REG_R17,
87 TCG_REG_R18,
88 TCG_REG_R19,
89 TCG_REG_R20,
90 TCG_REG_R21,
91 TCG_REG_R22,
92 TCG_REG_R23,
93 TCG_REG_R28,
94 TCG_REG_R29,
95 TCG_REG_R30,
96 TCG_REG_R31,
97 #ifdef __APPLE__
98 TCG_REG_R2,
99 #endif
100 TCG_REG_R3,
101 TCG_REG_R4,
102 TCG_REG_R5,
103 TCG_REG_R6,
104 TCG_REG_R7,
105 TCG_REG_R8,
106 TCG_REG_R9,
107 TCG_REG_R10,
108 #ifndef __APPLE__
109 TCG_REG_R11,
110 #endif
111 TCG_REG_R12,
112 #ifndef __linux__
113 TCG_REG_R13,
114 #endif
115 TCG_REG_R0,
116 TCG_REG_R1,
117 TCG_REG_R2,
118 TCG_REG_R24,
119 TCG_REG_R25,
120 TCG_REG_R26,
121 TCG_REG_R27
124 static const int tcg_target_call_iarg_regs[] = {
125 TCG_REG_R3,
126 TCG_REG_R4,
127 TCG_REG_R5,
128 TCG_REG_R6,
129 TCG_REG_R7,
130 TCG_REG_R8,
131 TCG_REG_R9,
132 TCG_REG_R10
135 static const int tcg_target_call_oarg_regs[2] = {
136 TCG_REG_R3,
137 TCG_REG_R4
140 static const int tcg_target_callee_save_regs[] = {
141 #ifdef __APPLE__
142 TCG_REG_R11,
143 TCG_REG_R13,
144 #endif
145 #ifdef _AIX
146 TCG_REG_R13,
147 #endif
148 TCG_REG_R14,
149 TCG_REG_R15,
150 TCG_REG_R16,
151 TCG_REG_R17,
152 TCG_REG_R18,
153 TCG_REG_R19,
154 TCG_REG_R20,
155 TCG_REG_R21,
156 TCG_REG_R22,
157 TCG_REG_R23,
158 TCG_REG_R24,
159 TCG_REG_R25,
160 TCG_REG_R26,
161 /* TCG_REG_R27, */ /* currently used for the global env, so no
162 need to save */
163 TCG_REG_R28,
164 TCG_REG_R29,
165 TCG_REG_R30,
166 TCG_REG_R31
169 static uint32_t reloc_pc24_val (void *pc, tcg_target_long target)
171 tcg_target_long disp;
173 disp = target - (tcg_target_long) pc;
174 if ((disp << 6) >> 6 != disp)
175 tcg_abort ();
177 return disp & 0x3fffffc;
180 static void reloc_pc24 (void *pc, tcg_target_long target)
182 *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3fffffc)
183 | reloc_pc24_val (pc, target);
186 static uint16_t reloc_pc14_val (void *pc, tcg_target_long target)
188 tcg_target_long disp;
190 disp = target - (tcg_target_long) pc;
191 if (disp != (int16_t) disp)
192 tcg_abort ();
194 return disp & 0xfffc;
197 static void reloc_pc14 (void *pc, tcg_target_long target)
199 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xfffc)
200 | reloc_pc14_val (pc, target);
203 static void patch_reloc(uint8_t *code_ptr, int type,
204 tcg_target_long value, tcg_target_long addend)
206 value += addend;
207 switch (type) {
208 case R_PPC_REL14:
209 reloc_pc14 (code_ptr, value);
210 break;
211 case R_PPC_REL24:
212 reloc_pc24 (code_ptr, value);
213 break;
214 default:
215 tcg_abort();
219 /* maximum number of register used for input function arguments */
220 static int tcg_target_get_call_iarg_regs_count(int flags)
222 return ARRAY_SIZE (tcg_target_call_iarg_regs);
225 /* parse target specific constraints */
226 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
228 const char *ct_str;
230 ct_str = *pct_str;
231 switch (ct_str[0]) {
232 case 'A': case 'B': case 'C': case 'D':
233 ct->ct |= TCG_CT_REG;
234 tcg_regset_set_reg(ct->u.regs, 3 + ct_str[0] - 'A');
235 break;
236 case 'r':
237 ct->ct |= TCG_CT_REG;
238 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
239 break;
240 #ifdef CONFIG_SOFTMMU
241 case 'L': /* qemu_ld constraint */
242 ct->ct |= TCG_CT_REG;
243 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
244 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
245 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
246 break;
247 case 'K': /* qemu_st[8..32] constraint */
248 ct->ct |= TCG_CT_REG;
249 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
250 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
251 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
252 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
253 #if TARGET_LONG_BITS == 64
254 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R6);
255 #endif
256 break;
257 case 'M': /* qemu_st64 constraint */
258 ct->ct |= TCG_CT_REG;
259 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
260 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
261 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
262 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
263 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R6);
264 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R7);
265 break;
266 #else
267 case 'L':
268 case 'K':
269 ct->ct |= TCG_CT_REG;
270 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
271 break;
272 case 'M':
273 ct->ct |= TCG_CT_REG;
274 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
275 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
276 break;
277 #endif
278 default:
279 return -1;
281 ct_str++;
282 *pct_str = ct_str;
283 return 0;
286 /* test if a constant matches the constraint */
287 static int tcg_target_const_match(tcg_target_long val,
288 const TCGArgConstraint *arg_ct)
290 int ct;
292 ct = arg_ct->ct;
293 if (ct & TCG_CT_CONST)
294 return 1;
295 return 0;
/* Instruction encoding helpers: primary opcode field, and the two
   extended-opcode forms used below (X-form under opcode 31, XL-form
   under opcode 19). */
298 #define OPCD(opc) ((opc)<<26)
299 #define XO31(opc) (OPCD(31)|((opc)<<1))
300 #define XO19(opc) (OPCD(19)|((opc)<<1))
/* Branches and D-form loads/stores. */
302 #define B OPCD(18)
303 #define BC OPCD(16)
304 #define LBZ OPCD(34)
305 #define LHZ OPCD(40)
306 #define LHA OPCD(42)
307 #define LWZ OPCD(32)
308 #define STB OPCD(38)
309 #define STH OPCD(44)
310 #define STW OPCD(36)
/* D-form arithmetic/logical immediates. */
312 #define ADDI OPCD(14)
313 #define ADDIS OPCD(15)
314 #define ORI OPCD(24)
315 #define ORIS OPCD(25)
316 #define XORI OPCD(26)
317 #define XORIS OPCD(27)
318 #define ANDI OPCD(28)
319 #define ANDIS OPCD(29)
320 #define MULLI OPCD( 7)
321 #define CMPLI OPCD(10)
322 #define CMPI OPCD(11)
/* Load/store with update (writes the effective address back to RA). */
324 #define LWZU OPCD(33)
325 #define STWU OPCD(37)
/* Rotate-left-word-immediate-then-AND-with-mask. */
327 #define RLWINM OPCD(21)
/* XL-form: branch-to-LR/CTR and condition-register logical ops. */
329 #define BCLR XO19( 16)
330 #define BCCTR XO19(528)
331 #define CRAND XO19(257)
332 #define CRANDC XO19(129)
333 #define CRNAND XO19(225)
334 #define CROR XO19(449)
/* X/XO-form integer ops under primary opcode 31. */
336 #define EXTSB XO31(954)
337 #define EXTSH XO31(922)
338 #define ADD XO31(266)
339 #define ADDE XO31(138)
340 #define ADDC XO31( 10)
341 #define AND XO31( 28)
342 #define SUBF XO31( 40)
343 #define SUBFC XO31( 8)
344 #define SUBFE XO31(136)
345 #define OR XO31(444)
346 #define XOR XO31(316)
347 #define MULLW XO31(235)
348 #define MULHWU XO31( 11)
349 #define DIVW XO31(491)
350 #define DIVWU XO31(459)
351 #define CMP XO31( 0)
352 #define CMPL XO31( 32)
/* Byte-reversed loads/stores (used for cross-endian accesses). */
353 #define LHBRX XO31(790)
354 #define LWBRX XO31(534)
355 #define STHBRX XO31(918)
356 #define STWBRX XO31(662)
357 #define MFSPR XO31(339)
358 #define MTSPR XO31(467)
359 #define SRAWI XO31(824)
360 #define NEG XO31(104)
/* X-form indexed loads/stores. */
362 #define LBZX XO31( 87)
363 #define LHZX XO31(276)
364 #define LHAX XO31(343)
365 #define LWZX XO31( 23)
366 #define STBX XO31(215)
367 #define STHX XO31(407)
368 #define STWX XO31(151)
/* Split special-purpose-register number field; LR is SPR 8, CTR SPR 9. */
370 #define SPR(a,b) ((((a)<<5)|(b))<<11)
371 #define LR SPR(8, 0)
372 #define CTR SPR(9, 0)
/* Variable-amount shifts. */
374 #define SLW XO31( 24)
375 #define SRW XO31(536)
376 #define SRAW XO31(792)
/* Load/store multiple word. */
378 #define LMW OPCD(46)
379 #define STMW OPCD(47)
/* Trap-word; TO(31) traps unconditionally. */
381 #define TW XO31(4)
382 #define TRAP (TW | TO (31))
/* Register/field placement within a 32-bit instruction word. */
384 #define RT(r) ((r)<<21)
385 #define RS(r) ((r)<<21)
386 #define RA(r) ((r)<<16)
387 #define RB(r) ((r)<<11)
388 #define TO(t) ((t)<<21)
389 #define SH(s) ((s)<<11)
390 #define MB(b) ((b)<<6)
391 #define ME(e) ((e)<<1)
392 #define BO(o) ((o)<<21)
/* Link bit: record the return address in LR. */
394 #define LK 1
/* Three-register operand packs for X/XO-form instructions. */
396 #define TAB(t,a,b) (RT(t) | RA(a) | RB(b))
397 #define SAB(s,a,b) (RS(s) | RA(a) | RB(b))
/* Condition-register field/bit selectors: n is the CR field (0..7),
   c the bit within it (CR_LT..CR_SO). */
399 #define BF(n) ((n)<<23)
400 #define BI(n, c) (((c)+((n)*4))<<16)
401 #define BT(n, c) (((c)+((n)*4))<<21)
402 #define BA(n, c) (((c)+((n)*4))<<16)
403 #define BB(n, c) (((c)+((n)*4))<<11)
/* BO encodings: branch if CR bit set / clear / always. */
405 #define BO_COND_TRUE BO (12)
406 #define BO_COND_FALSE BO (4)
407 #define BO_ALWAYS BO (20)
/* Bit indices within one condition-register field, in PPC order
   (used as the 'c' argument of BI/BT/BA/BB above). */
enum {
    CR_LT,
    CR_GT,
    CR_EQ,
    CR_SO
};
416 static const uint32_t tcg_to_bc[10] = {
417 [TCG_COND_EQ] = BC | BI (7, CR_EQ) | BO_COND_TRUE,
418 [TCG_COND_NE] = BC | BI (7, CR_EQ) | BO_COND_FALSE,
419 [TCG_COND_LT] = BC | BI (7, CR_LT) | BO_COND_TRUE,
420 [TCG_COND_GE] = BC | BI (7, CR_LT) | BO_COND_FALSE,
421 [TCG_COND_LE] = BC | BI (7, CR_GT) | BO_COND_FALSE,
422 [TCG_COND_GT] = BC | BI (7, CR_GT) | BO_COND_TRUE,
423 [TCG_COND_LTU] = BC | BI (7, CR_LT) | BO_COND_TRUE,
424 [TCG_COND_GEU] = BC | BI (7, CR_LT) | BO_COND_FALSE,
425 [TCG_COND_LEU] = BC | BI (7, CR_GT) | BO_COND_FALSE,
426 [TCG_COND_GTU] = BC | BI (7, CR_GT) | BO_COND_TRUE,
429 static void tcg_out_mov(TCGContext *s, int ret, int arg)
431 tcg_out32 (s, OR | SAB (arg, ret, arg));
434 static void tcg_out_movi(TCGContext *s, TCGType type,
435 int ret, tcg_target_long arg)
437 if (arg == (int16_t) arg)
438 tcg_out32 (s, ADDI | RT (ret) | RA (0) | (arg & 0xffff));
439 else {
440 tcg_out32 (s, ADDIS | RT (ret) | RA (0) | ((arg >> 16) & 0xffff));
441 if (arg & 0xffff)
442 tcg_out32 (s, ORI | RS (ret) | RA (ret) | (arg & 0xffff));
446 static void tcg_out_ldst (TCGContext *s, int ret, int addr,
447 int offset, int op1, int op2)
449 if (offset == (int16_t) offset)
450 tcg_out32 (s, op1 | RT (ret) | RA (addr) | (offset & 0xffff));
451 else {
452 tcg_out_movi (s, TCG_TYPE_I32, 0, offset);
453 tcg_out32 (s, op2 | RT (ret) | RA (addr) | RB (0));
457 static void tcg_out_b (TCGContext *s, int mask, tcg_target_long target)
459 tcg_target_long disp;
461 disp = target - (tcg_target_long) s->code_ptr;
462 if ((disp << 6) >> 6 == disp)
463 tcg_out32 (s, B | (disp & 0x3fffffc) | mask);
464 else {
465 tcg_out_movi (s, TCG_TYPE_I32, 0, (tcg_target_long) target);
466 tcg_out32 (s, MTSPR | RS (0) | CTR);
467 tcg_out32 (s, BCCTR | BO_ALWAYS | mask);
#ifdef _AIX
/* AIX call through a function descriptor: load the entry point from the
   descriptor into CTR, reload the callee's TOC into r2, then bctrl.
   'arg' is the descriptor address (immediate when const_arg). */
static void tcg_out_call (TCGContext *s, tcg_target_long arg, int const_arg)
{
    int reg = arg;

    if (const_arg) {
        reg = 2;
        tcg_out_movi (s, TCG_TYPE_I32, reg, arg);
    }

    tcg_out32 (s, LWZ | RT (0) | RA (reg));
    /* NOTE(review): RA(0) here equals RS(0) — register 0 encodes as 0 in
       either field position, so the mtctr is emitted correctly. */
    tcg_out32 (s, MTSPR | RA (0) | CTR);
    tcg_out32 (s, LWZ | RT (2) | RA (reg) | 4);
    tcg_out32 (s, BCCTR | BO_ALWAYS | LK);
}
#endif
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* Slow-path helpers for guest memory access, indexed by log2(size). */
static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif
/* Emit a guest load.  opc encodes log2(size) in bits 0..1 with bit 2 set
   for sign extension; opc==3 is a 64-bit load into data_reg2:data_reg.
   With CONFIG_SOFTMMU this emits a TLB lookup, an inline fast path on a
   hit, and a helper call on a miss. */
508 static void tcg_out_qemu_ld (TCGContext *s, const TCGArg *args, int opc)
510 int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
511 #ifdef CONFIG_SOFTMMU
512 int r2;
513 void *label1_ptr, *label2_ptr;
514 #endif
515 #if TARGET_LONG_BITS == 64
516 int addr_reg2;
517 #endif
519 data_reg = *args++;
520 if (opc == 3)
521 data_reg2 = *args++;
522 else
523 data_reg2 = 0;
524 addr_reg = *args++;
525 #if TARGET_LONG_BITS == 64
526 addr_reg2 = *args++;
527 #endif
528 mem_index = *args;
529 s_bits = opc & 3;
531 #ifdef CONFIG_SOFTMMU
/* r0/r1 double as scratch here and as the first helper-call argument
   registers (r3/r4) on the slow path. */
532 r0 = 3;
533 r1 = 4;
534 r2 = 0;
/* Extract the TLB index from the address and scale it to an entry
   offset; then add the env pointer to get the entry's address. */
536 tcg_out32 (s, (RLWINM
537 | RA (r0)
538 | RS (addr_reg)
539 | SH (32 - (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS))
540 | MB (32 - (CPU_TLB_BITS + CPU_TLB_ENTRY_BITS))
541 | ME (31 - CPU_TLB_ENTRY_BITS)
544 tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (TCG_AREG0));
/* lwzu loads the tag and leaves r0 pointing at addr_read. */
545 tcg_out32 (s, (LWZU
546 | RT (r1)
547 | RA (r0)
548 | offsetof (CPUState, tlb_table[mem_index][0].addr_read)
/* Mask the page offset (and alignment bits) out of the address and
   compare against the TLB tag in CR field 7. */
551 tcg_out32 (s, (RLWINM
552 | RA (r2)
553 | RS (addr_reg)
554 | SH (0)
555 | MB ((32 - s_bits) & 31)
556 | ME (31 - TARGET_PAGE_BITS)
560 tcg_out32 (s, CMP | BF (7) | RA (r2) | RB (r1));
561 #if TARGET_LONG_BITS == 64
/* 64-bit guest address: also compare the high word, AND both results. */
562 tcg_out32 (s, LWZ | RT (r1) | RA (r0) | 4);
563 tcg_out32 (s, CMP | BF (6) | RA (addr_reg2) | RB (r1));
564 tcg_out32 (s, CRAND | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
565 #endif
/* Forward branch to the fast path on a TLB hit; patched below. */
567 label1_ptr = s->code_ptr;
568 #ifdef FAST_PATH
569 tcg_out32 (s, BC | BI (7, CR_EQ) | BO_COND_TRUE);
570 #endif
572 /* slow path */
/* Marshal (addr[, addr_hi], mem_index) into the helper's argument
   registers and call the size-specific load helper. */
573 #if TARGET_LONG_BITS == 32
574 tcg_out_mov (s, 3, addr_reg);
575 tcg_out_movi (s, TCG_TYPE_I32, 4, mem_index);
576 #else
577 tcg_out_mov (s, 3, addr_reg2);
578 tcg_out_mov (s, 4, addr_reg);
579 tcg_out_movi (s, TCG_TYPE_I32, 5, mem_index);
580 #endif
582 #ifdef _AIX
583 tcg_out_call (s, (tcg_target_long) qemu_ld_helpers[s_bits], 1);
584 #else
585 tcg_out_b (s, LK, (tcg_target_long) qemu_ld_helpers[s_bits]);
586 #endif
/* Move/extend the helper's result from r3 (r3:r4 for 64-bit) into the
   destination register(s), being careful about overlap. */
587 switch (opc) {
588 case 0|4:
589 tcg_out32 (s, EXTSB | RA (data_reg) | RS (3));
590 break;
591 case 1|4:
592 tcg_out32 (s, EXTSH | RA (data_reg) | RS (3));
593 break;
594 case 0:
595 case 1:
596 case 2:
597 if (data_reg != 3)
598 tcg_out_mov (s, data_reg, 3);
599 break;
600 case 3:
601 if (data_reg == 3) {
602 if (data_reg2 == 4) {
/* Swap r3/r4 through r0. */
603 tcg_out_mov (s, 0, 4);
604 tcg_out_mov (s, 4, 3);
605 tcg_out_mov (s, 3, 0);
607 else {
608 tcg_out_mov (s, data_reg2, 3);
609 tcg_out_mov (s, 3, 4);
612 else {
613 if (data_reg != 4) tcg_out_mov (s, data_reg, 4);
614 if (data_reg2 != 3) tcg_out_mov (s, data_reg2, 3);
616 break;
/* Skip over the fast path; patched at the end of the function. */
618 label2_ptr = s->code_ptr;
619 tcg_out32 (s, B);
621 /* label1: fast path */
622 #ifdef FAST_PATH
623 reloc_pc14 (label1_ptr, (tcg_target_long) s->code_ptr);
624 #endif
626 /* r0 now contains &env->tlb_table[mem_index][index].addr_read */
627 tcg_out32 (s, (LWZ
628 | RT (r0)
629 | RA (r0)
630 | (ADDEND_OFFSET + offsetof (CPUTLBEntry, addend)
631 - offsetof (CPUTLBEntry, addr_read))
633 /* r0 = env->tlb_table[mem_index][index].addend */
634 tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (addr_reg));
635 /* r0 = env->tlb_table[mem_index][index].addend + addr */
637 #else /* !CONFIG_SOFTMMU */
638 r0 = addr_reg;
639 r1 = 3;
640 #endif
/* Host/guest endian mismatch requires byte-reversed loads. */
642 #ifdef TARGET_WORDS_BIGENDIAN
643 bswap = 0;
644 #else
645 bswap = 1;
646 #endif
/* Fast path: emit the actual load from the host address in r0. */
647 switch (opc) {
648 default:
649 case 0:
650 tcg_out32 (s, LBZ | RT (data_reg) | RA (r0));
651 break;
652 case 0|4:
653 tcg_out32 (s, LBZ | RT (data_reg) | RA (r0));
654 tcg_out32 (s, EXTSB | RA (data_reg) | RS (data_reg));
655 break;
656 case 1:
657 if (bswap) tcg_out32 (s, LHBRX | RT (data_reg) | RB (r0));
658 else tcg_out32 (s, LHZ | RT (data_reg) | RA (r0));
659 break;
660 case 1|4:
661 if (bswap) {
662 tcg_out32 (s, LHBRX | RT (data_reg) | RB (r0));
663 tcg_out32 (s, EXTSH | RA (data_reg) | RS (data_reg));
665 else tcg_out32 (s, LHA | RT (data_reg) | RA (r0));
666 break;
667 case 2:
668 if (bswap) tcg_out32 (s, LWBRX | RT (data_reg) | RB (r0));
669 else tcg_out32 (s, LWZ | RT (data_reg)| RA (r0));
670 break;
671 case 3:
672 if (bswap) {
673 tcg_out32 (s, ADDI | RT (r1) | RA (r0) | 4);
674 tcg_out32 (s, LWBRX | RT (data_reg) | RB (r0));
675 tcg_out32 (s, LWBRX | RT (data_reg2) | RB (r1));
677 else {
/* Load low word first through r0's alias when it would clobber
   the base register. */
678 if (r0 == data_reg2) {
679 tcg_out32 (s, LWZ | RT (0) | RA (r0));
680 tcg_out32 (s, LWZ | RT (data_reg) | RA (r0) | 4);
681 tcg_out_mov (s, data_reg2, 0);
683 else {
684 tcg_out32 (s, LWZ | RT (data_reg2) | RA (r0));
685 tcg_out32 (s, LWZ | RT (data_reg) | RA (r0) | 4);
688 break;
/* Resolve the slow path's jump over the fast path. */
691 #ifdef CONFIG_SOFTMMU
692 reloc_pc24 (label2_ptr, (tcg_target_long) s->code_ptr);
693 #endif
/* Emit a guest store.  opc is log2(size); opc==3 stores the 64-bit pair
   data_reg2:data_reg.  Mirrors tcg_out_qemu_ld: TLB lookup against
   addr_write, inline fast path on hit, helper call on miss. */
696 static void tcg_out_qemu_st (TCGContext *s, const TCGArg *args, int opc)
698 int addr_reg, r0, r1, data_reg, data_reg2, mem_index, bswap;
699 #ifdef CONFIG_SOFTMMU
700 int r2, ir;
701 void *label1_ptr, *label2_ptr;
702 #endif
703 #if TARGET_LONG_BITS == 64
704 int addr_reg2;
705 #endif
707 data_reg = *args++;
708 if (opc == 3)
709 data_reg2 = *args++;
710 else
711 data_reg2 = 0;
712 addr_reg = *args++;
713 #if TARGET_LONG_BITS == 64
714 addr_reg2 = *args++;
715 #endif
716 mem_index = *args;
718 #ifdef CONFIG_SOFTMMU
719 r0 = 3;
720 r1 = 4;
721 r2 = 0;
/* Compute &env->tlb_table[mem_index][index] and load the write tag
   (lwzu leaves r0 pointing at addr_write). */
723 tcg_out32 (s, (RLWINM
724 | RA (r0)
725 | RS (addr_reg)
726 | SH (32 - (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS))
727 | MB (32 - (CPU_TLB_ENTRY_BITS + CPU_TLB_BITS))
728 | ME (31 - CPU_TLB_ENTRY_BITS)
731 tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (TCG_AREG0));
732 tcg_out32 (s, (LWZU
733 | RT (r1)
734 | RA (r0)
735 | offsetof (CPUState, tlb_table[mem_index][0].addr_write)
/* Mask page offset and alignment bits, compare with the tag in CR7.
   NOTE(review): the mask uses 'opc' where tcg_out_qemu_ld uses
   's_bits' — equivalent only for opc 0..3; confirm for sign-extending
   variants. */
738 tcg_out32 (s, (RLWINM
739 | RA (r2)
740 | RS (addr_reg)
741 | SH (0)
742 | MB ((32 - opc) & 31)
743 | ME (31 - TARGET_PAGE_BITS)
/* (7 << 23) is BF(7), spelled out literally here. */
747 tcg_out32 (s, CMP | (7 << 23) | RA (r2) | RB (r1));
748 #if TARGET_LONG_BITS == 64
749 tcg_out32 (s, LWZ | RT (r1) | RA (r0) | 4);
750 tcg_out32 (s, CMP | BF (6) | RA (addr_reg2) | RB (r1));
751 tcg_out32 (s, CRAND | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
752 #endif
/* Forward branch to the fast path on a TLB hit; patched below. */
754 label1_ptr = s->code_ptr;
755 #ifdef FAST_PATH
756 tcg_out32 (s, BC | BI (7, CR_EQ) | BO_COND_TRUE);
757 #endif
759 /* slow path */
/* Marshal the address; 'ir' tracks the next free argument register
   for the data value (pair-aligned when TCG_TARGET_CALL_ALIGN_ARGS). */
760 #if TARGET_LONG_BITS == 32
761 tcg_out_mov (s, 3, addr_reg);
762 ir = 4;
763 #else
764 tcg_out_mov (s, 3, addr_reg2);
765 tcg_out_mov (s, 4, addr_reg);
766 #ifdef TCG_TARGET_CALL_ALIGN_ARGS
767 ir = 5;
768 #else
769 ir = 4;
770 #endif
771 #endif
/* Zero-extend sub-word data into the argument register. */
773 switch (opc) {
774 case 0:
775 tcg_out32 (s, (RLWINM
776 | RA (ir)
777 | RS (data_reg)
778 | SH (0)
779 | MB (24)
780 | ME (31)));
781 break;
782 case 1:
783 tcg_out32 (s, (RLWINM
784 | RA (ir)
785 | RS (data_reg)
786 | SH (0)
787 | MB (16)
788 | ME (31)));
789 break;
790 case 2:
791 tcg_out_mov (s, ir, data_reg);
792 break;
793 case 3:
794 #ifdef TCG_TARGET_CALL_ALIGN_ARGS
795 ir = 5;
796 #endif
797 tcg_out_mov (s, ir++, data_reg2);
798 tcg_out_mov (s, ir, data_reg);
799 break;
/* mem_index goes in the register after the data. */
801 ir++;
803 tcg_out_movi (s, TCG_TYPE_I32, ir, mem_index);
804 #ifdef _AIX
805 tcg_out_call (s, (tcg_target_long) qemu_st_helpers[opc], 1);
806 #else
807 tcg_out_b (s, LK, (tcg_target_long) qemu_st_helpers[opc]);
808 #endif
/* Jump over the fast path; patched at the end. */
809 label2_ptr = s->code_ptr;
810 tcg_out32 (s, B);
812 /* label1: fast path */
813 #ifdef FAST_PATH
814 reloc_pc14 (label1_ptr, (tcg_target_long) s->code_ptr);
815 #endif
/* r0 points at addr_write; fetch the addend and form the host address. */
817 tcg_out32 (s, (LWZ
818 | RT (r0)
819 | RA (r0)
820 | (ADDEND_OFFSET + offsetof (CPUTLBEntry, addend)
821 - offsetof (CPUTLBEntry, addr_write))
823 /* r0 = env->tlb_table[mem_index][index].addend */
824 tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (addr_reg));
825 /* r0 = env->tlb_table[mem_index][index].addend + addr */
827 #else /* !CONFIG_SOFTMMU */
828 r1 = 3;
829 r0 = addr_reg;
830 #endif
/* Host/guest endian mismatch requires byte-reversed stores. */
832 #ifdef TARGET_WORDS_BIGENDIAN
833 bswap = 0;
834 #else
835 bswap = 1;
836 #endif
/* Fast path: emit the actual store to the host address in r0. */
837 switch (opc) {
838 case 0:
839 tcg_out32 (s, STB | RS (data_reg) | RA (r0));
840 break;
841 case 1:
842 if (bswap) tcg_out32 (s, STHBRX | RS (data_reg) | RA (0) | RB (r0));
843 else tcg_out32 (s, STH | RS (data_reg) | RA (r0));
844 break;
845 case 2:
846 if (bswap) tcg_out32 (s, STWBRX | RS (data_reg) | RA (0) | RB (r0));
847 else tcg_out32 (s, STW | RS (data_reg) | RA (r0));
848 break;
849 case 3:
850 if (bswap) {
851 tcg_out32 (s, ADDI | RT (r1) | RA (r0) | 4);
852 tcg_out32 (s, STWBRX | RS (data_reg) | RA (0) | RB (r0));
853 tcg_out32 (s, STWBRX | RS (data_reg2) | RA (0) | RB (r1));
855 else {
856 tcg_out32 (s, STW | RS (data_reg2) | RA (r0));
857 tcg_out32 (s, STW | RS (data_reg) | RA (r0) | 4);
859 break;
/* Resolve the slow path's jump over the fast path. */
862 #ifdef CONFIG_SOFTMMU
863 reloc_pc24 (label2_ptr, (tcg_target_long) s->code_ptr);
864 #endif
867 void tcg_target_qemu_prologue (TCGContext *s)
869 int i, frame_size;
871 frame_size = 0
872 + LINKAGE_AREA_SIZE
873 + TCG_STATIC_CALL_ARGS_SIZE
874 + ARRAY_SIZE (tcg_target_callee_save_regs) * 4
876 frame_size = (frame_size + 15) & ~15;
878 #ifdef _AIX
880 uint32_t addr;
882 /* First emit adhoc function descriptor */
883 addr = (uint32_t) s->code_ptr + 12;
884 tcg_out32 (s, addr); /* entry point */
885 s->code_ptr += 8; /* skip TOC and environment pointer */
887 #endif
888 tcg_out32 (s, MFSPR | RT (0) | LR);
889 tcg_out32 (s, STWU | RS (1) | RA (1) | (-frame_size & 0xffff));
890 for (i = 0; i < ARRAY_SIZE (tcg_target_callee_save_regs); ++i)
891 tcg_out32 (s, (STW
892 | RS (tcg_target_callee_save_regs[i])
893 | RA (1)
894 | (i * 4 + LINKAGE_AREA_SIZE + TCG_STATIC_CALL_ARGS_SIZE)
897 tcg_out32 (s, STW | RS (0) | RA (1) | (frame_size + LR_OFFSET));
899 tcg_out32 (s, MTSPR | RS (3) | CTR);
900 tcg_out32 (s, BCCTR | BO_ALWAYS);
901 tb_ret_addr = s->code_ptr;
903 for (i = 0; i < ARRAY_SIZE (tcg_target_callee_save_regs); ++i)
904 tcg_out32 (s, (LWZ
905 | RT (tcg_target_callee_save_regs[i])
906 | RA (1)
907 | (i * 4 + LINKAGE_AREA_SIZE + TCG_STATIC_CALL_ARGS_SIZE)
910 tcg_out32 (s, LWZ | RT (0) | RA (1) | (frame_size + LR_OFFSET));
911 tcg_out32 (s, MTSPR | RS (0) | LR);
912 tcg_out32 (s, ADDI | RT (1) | RA (1) | frame_size);
913 tcg_out32 (s, BCLR | BO_ALWAYS);
916 static void tcg_out_ld (TCGContext *s, TCGType type, int ret, int arg1,
917 tcg_target_long arg2)
919 tcg_out_ldst (s, ret, arg1, arg2, LWZ, LWZX);
922 static void tcg_out_st (TCGContext *s, TCGType type, int arg, int arg1,
923 tcg_target_long arg2)
925 tcg_out_ldst (s, arg, arg1, arg2, STW, STWX);
928 static void ppc_addi (TCGContext *s, int rt, int ra, tcg_target_long si)
930 if (!si && rt == ra)
931 return;
933 if (si == (int16_t) si)
934 tcg_out32 (s, ADDI | RT (rt) | RA (ra) | (si & 0xffff));
935 else {
936 uint16_t h = ((si >> 16) & 0xffff) + ((uint16_t) si >> 15);
937 tcg_out32 (s, ADDIS | RT (rt) | RA (ra) | h);
938 tcg_out32 (s, ADDI | RT (rt) | RA (rt) | (si & 0xffff));
942 static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
944 ppc_addi (s, reg, reg, val);
/* Emit a compare of arg1 against arg2 (register or constant) into CR
   field 'cr'.  Picks the signed/unsigned and immediate/register form
   to match the TCG condition; each switch arm falls through to the
   register form when the constant does not fit the immediate field. */
947 static void tcg_out_cmp (TCGContext *s, int cond, TCGArg arg1, TCGArg arg2,
948 int const_arg2, int cr)
950 int imm;
951 uint32_t op;
953 switch (cond) {
954 case TCG_COND_EQ:
955 case TCG_COND_NE:
/* EQ/NE: either immediate form works; prefer whichever fits. */
956 if (const_arg2) {
957 if ((int16_t) arg2 == arg2) {
958 op = CMPI;
959 imm = 1;
960 break;
962 else if ((uint16_t) arg2 == arg2) {
963 op = CMPLI;
964 imm = 1;
965 break;
/* Register form: logical compare (equality is sign-agnostic). */
968 op = CMPL;
969 imm = 0;
970 break;
972 case TCG_COND_LT:
973 case TCG_COND_GE:
974 case TCG_COND_LE:
975 case TCG_COND_GT:
/* Signed orderings: cmpwi / cmpw. */
976 if (const_arg2) {
977 if ((int16_t) arg2 == arg2) {
978 op = CMPI;
979 imm = 1;
980 break;
983 op = CMP;
984 imm = 0;
985 break;
987 case TCG_COND_LTU:
988 case TCG_COND_GEU:
989 case TCG_COND_LEU:
990 case TCG_COND_GTU:
/* Unsigned orderings: cmplwi / cmplw. */
991 if (const_arg2) {
992 if ((uint16_t) arg2 == arg2) {
993 op = CMPLI;
994 imm = 1;
995 break;
998 op = CMPL;
999 imm = 0;
1000 break;
1002 default:
1003 tcg_abort ();
1005 op |= BF (cr);
1007 if (imm)
1008 tcg_out32 (s, op | RA (arg1) | (arg2 & 0xffff));
1009 else {
/* Oversized constant: materialize it in r0 first. */
1010 if (const_arg2) {
1011 tcg_out_movi (s, TCG_TYPE_I32, 0, arg2);
1012 tcg_out32 (s, op | RA (arg1) | RB (0));
1014 else
1015 tcg_out32 (s, op | RA (arg1) | RB (arg2));
1020 static void tcg_out_bc (TCGContext *s, int bc, int label_index)
1022 TCGLabel *l = &s->labels[label_index];
1024 if (l->has_value)
1025 tcg_out32 (s, bc | reloc_pc14_val (s->code_ptr, l->u.value));
1026 else {
1027 uint16_t val = *(uint16_t *) &s->code_ptr[2];
1029 /* Thanks to Andrzej Zaborowski */
1030 tcg_out32 (s, bc | (val & 0xfffc));
1031 tcg_out_reloc (s, s->code_ptr - 4, R_PPC_REL14, label_index, 0);
1035 static void tcg_out_brcond (TCGContext *s, int cond,
1036 TCGArg arg1, TCGArg arg2, int const_arg2,
1037 int label_index)
1039 tcg_out_cmp (s, cond, arg1, arg2, const_arg2, 7);
1040 tcg_out_bc (s, tcg_to_bc[cond], label_index);
1043 /* XXX: we implement it at the target level to avoid having to
1044 handle cross basic blocks temporaries */
/* 64-bit conditional branch built from 32-bit halves:
   args[0]/args[1] = low/high of the first operand,
   args[2]/args[3] = low/high of the second, args[4] = condition,
   args[5] = label.  Combines three CR-field compares into CR7[EQ]
   and branches on that bit. */
1045 static void tcg_out_brcond2 (TCGContext *s, const TCGArg *args,
1046 const int *const_args)
1048 int cond = args[4], label_index = args[5], op;
/* For each ordering: which CR bit of the high-word compare proves the
   result (bit1), which bit of the low-word compare decides ties (bit2),
   and which condition to use for the high-word compare (cond2). */
1049 struct { int bit1; int bit2; int cond2; } bits[] = {
1050 [TCG_COND_LT ] = { CR_LT, CR_LT, TCG_COND_LT },
1051 [TCG_COND_LE ] = { CR_LT, CR_GT, TCG_COND_LT },
1052 [TCG_COND_GT ] = { CR_GT, CR_GT, TCG_COND_GT },
1053 [TCG_COND_GE ] = { CR_GT, CR_LT, TCG_COND_GT },
1054 [TCG_COND_LTU] = { CR_LT, CR_LT, TCG_COND_LTU },
1055 [TCG_COND_LEU] = { CR_LT, CR_GT, TCG_COND_LTU },
1056 [TCG_COND_GTU] = { CR_GT, CR_GT, TCG_COND_GTU },
1057 [TCG_COND_GEU] = { CR_GT, CR_LT, TCG_COND_GTU },
1058 }, *b = &bits[cond];
1060 switch (cond) {
1061 case TCG_COND_EQ:
1062 case TCG_COND_NE:
/* Equality: both halves equal (AND), inequality: not both (NAND). */
1063 op = (cond == TCG_COND_EQ) ? CRAND : CRNAND;
1064 tcg_out_cmp (s, cond, args[0], args[2], const_args[2], 6);
1065 tcg_out_cmp (s, cond, args[1], args[3], const_args[3], 7);
1066 tcg_out32 (s, op | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
1067 break;
1068 case TCG_COND_LT:
1069 case TCG_COND_LE:
1070 case TCG_COND_GT:
1071 case TCG_COND_GE:
1072 case TCG_COND_LTU:
1073 case TCG_COND_LEU:
1074 case TCG_COND_GTU:
1075 case TCG_COND_GEU:
1076 op = (b->bit1 != b->bit2) ? CRANDC : CRAND;
/* CR5 = high-word ordering, CR6 = high words equal, CR7 = low-word
   ordering; result = high decides, or highs equal AND low decides. */
1077 tcg_out_cmp (s, b->cond2, args[1], args[3], const_args[3], 5);
1078 tcg_out_cmp (s, TCG_COND_EQ, args[1], args[3], const_args[3], 6);
1079 tcg_out_cmp (s, cond, args[0], args[2], const_args[2], 7);
1080 tcg_out32 (s, op | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, b->bit2));
1081 tcg_out32 (s, CROR | BT (7, CR_EQ) | BA (5, b->bit1) | BB (7, CR_EQ));
1082 break;
1083 default:
1084 tcg_abort();
1087 tcg_out_bc (s, (BC | BI (7, CR_EQ) | BO_COND_TRUE), label_index);
/* Patch a direct-jump slot (16 bytes reserved by INDEX_op_goto_tb) at
   jmp_addr to branch to addr, then flush the icache over the patch. */
void ppc_tb_set_jmp_target (unsigned long jmp_addr, unsigned long addr)
{
    uint32_t *ptr = (uint32_t *) jmp_addr;
    long disp = addr - jmp_addr;
    unsigned long patch_size;

    if ((disp << 6) >> 6 != disp) {
        /* Out of direct-branch range: load the address and go via CTR. */
        ptr[0] = 0x3c000000 | (addr >> 16);     /* lis  r0,addr@h */
        ptr[1] = 0x60000000 | (addr & 0xffff);  /* ori  r0,r0,addr@l */
        ptr[2] = 0x7c0903a6;                    /* mtctr r0 */
        ptr[3] = 0x4e800420;                    /* bctr */
        patch_size = 16;
    } else if (disp != 16) {
        /* Single direct branch. */
        *ptr = 0x48000000 | (disp & 0x03fffffc); /* b disp */
        patch_size = 4;
    } else {
        /* Target immediately follows the slot: fill it with nops. */
        ptr[0] = 0x60000000;                    /* nop */
        ptr[1] = 0x60000000;
        ptr[2] = 0x60000000;
        ptr[3] = 0x60000000;
        patch_size = 16;
    }
    flush_icache_range(jmp_addr, jmp_addr + patch_size);
}
1121 static void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
1122 const int *const_args)
1124 switch (opc) {
1125 case INDEX_op_exit_tb:
1126 tcg_out_movi (s, TCG_TYPE_I32, TCG_REG_R3, args[0]);
1127 tcg_out_b (s, 0, (tcg_target_long) tb_ret_addr);
1128 break;
1129 case INDEX_op_goto_tb:
1130 if (s->tb_jmp_offset) {
1131 /* direct jump method */
1133 s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
1134 s->code_ptr += 16;
1136 else {
1137 tcg_abort ();
1139 s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
1140 break;
1141 case INDEX_op_br:
1143 TCGLabel *l = &s->labels[args[0]];
1145 if (l->has_value) {
1146 tcg_out_b (s, 0, l->u.value);
1148 else {
1149 uint32_t val = *(uint32_t *) s->code_ptr;
1151 /* Thanks to Andrzej Zaborowski */
1152 tcg_out32 (s, B | (val & 0x3fffffc));
1153 tcg_out_reloc (s, s->code_ptr - 4, R_PPC_REL24, args[0], 0);
1156 break;
1157 case INDEX_op_call:
1158 #ifdef _AIX
1159 tcg_out_call (s, args[0], const_args[0]);
1160 #else
1161 if (const_args[0]) {
1162 tcg_out_b (s, LK, args[0]);
1164 else {
1165 tcg_out32 (s, MTSPR | RS (args[0]) | LR);
1166 tcg_out32 (s, BCLR | BO_ALWAYS | LK);
1168 #endif
1169 break;
1170 case INDEX_op_jmp:
1171 if (const_args[0]) {
1172 tcg_out_b (s, 0, args[0]);
1174 else {
1175 tcg_out32 (s, MTSPR | RS (args[0]) | CTR);
1176 tcg_out32 (s, BCCTR | BO_ALWAYS);
1178 break;
1179 case INDEX_op_movi_i32:
1180 tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
1181 break;
1182 case INDEX_op_ld8u_i32:
1183 tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX);
1184 break;
1185 case INDEX_op_ld8s_i32:
1186 tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX);
1187 tcg_out32 (s, EXTSB | RS (args[0]) | RA (args[0]));
1188 break;
1189 case INDEX_op_ld16u_i32:
1190 tcg_out_ldst (s, args[0], args[1], args[2], LHZ, LHZX);
1191 break;
1192 case INDEX_op_ld16s_i32:
1193 tcg_out_ldst (s, args[0], args[1], args[2], LHA, LHAX);
1194 break;
1195 case INDEX_op_ld_i32:
1196 tcg_out_ldst (s, args[0], args[1], args[2], LWZ, LWZX);
1197 break;
1198 case INDEX_op_st8_i32:
1199 tcg_out_ldst (s, args[0], args[1], args[2], STB, STBX);
1200 break;
1201 case INDEX_op_st16_i32:
1202 tcg_out_ldst (s, args[0], args[1], args[2], STH, STHX);
1203 break;
1204 case INDEX_op_st_i32:
1205 tcg_out_ldst (s, args[0], args[1], args[2], STW, STWX);
1206 break;
1208 case INDEX_op_add_i32:
1209 if (const_args[2])
1210 ppc_addi (s, args[0], args[1], args[2]);
1211 else
1212 tcg_out32 (s, ADD | TAB (args[0], args[1], args[2]));
1213 break;
1214 case INDEX_op_sub_i32:
1215 if (const_args[2])
1216 ppc_addi (s, args[0], args[1], -args[2]);
1217 else
1218 tcg_out32 (s, SUBF | TAB (args[0], args[2], args[1]));
1219 break;
1221 case INDEX_op_and_i32:
1222 if (const_args[2]) {
1223 if ((args[2] & 0xffff) == args[2])
1224 tcg_out32 (s, ANDI | RS (args[1]) | RA (args[0]) | args[2]);
1225 else if ((args[2] & 0xffff0000) == args[2])
1226 tcg_out32 (s, ANDIS | RS (args[1]) | RA (args[0])
1227 | ((args[2] >> 16) & 0xffff));
1228 else {
1229 tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
1230 tcg_out32 (s, AND | SAB (args[1], args[0], 0));
1233 else
1234 tcg_out32 (s, AND | SAB (args[1], args[0], args[2]));
1235 break;
1236 case INDEX_op_or_i32:
1237 if (const_args[2]) {
1238 if (args[2] & 0xffff) {
1239 tcg_out32 (s, ORI | RS (args[1]) | RA (args[0])
1240 | (args[2] & 0xffff));
1241 if (args[2] >> 16)
1242 tcg_out32 (s, ORIS | RS (args[0]) | RA (args[0])
1243 | ((args[2] >> 16) & 0xffff));
1245 else {
1246 tcg_out32 (s, ORIS | RS (args[1]) | RA (args[0])
1247 | ((args[2] >> 16) & 0xffff));
1250 else
1251 tcg_out32 (s, OR | SAB (args[1], args[0], args[2]));
1252 break;
1253 case INDEX_op_xor_i32:
1254 if (const_args[2]) {
1255 if ((args[2] & 0xffff) == args[2])
1256 tcg_out32 (s, XORI | RS (args[1]) | RA (args[0])
1257 | (args[2] & 0xffff));
1258 else if ((args[2] & 0xffff0000) == args[2])
1259 tcg_out32 (s, XORIS | RS (args[1]) | RA (args[0])
1260 | ((args[2] >> 16) & 0xffff));
1261 else {
1262 tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
1263 tcg_out32 (s, XOR | SAB (args[1], args[0], 0));
1266 else
1267 tcg_out32 (s, XOR | SAB (args[1], args[0], args[2]));
1268 break;
1270 case INDEX_op_mul_i32:
1271 if (const_args[2]) {
1272 if (args[2] == (int16_t) args[2])
1273 tcg_out32 (s, MULLI | RT (args[0]) | RA (args[1])
1274 | (args[2] & 0xffff));
1275 else {
1276 tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
1277 tcg_out32 (s, MULLW | TAB (args[0], args[1], 0));
1280 else
1281 tcg_out32 (s, MULLW | TAB (args[0], args[1], args[2]));
1282 break;
1284 case INDEX_op_div_i32:
1285 tcg_out32 (s, DIVW | TAB (args[0], args[1], args[2]));
1286 break;
1288 case INDEX_op_divu_i32:
1289 tcg_out32 (s, DIVWU | TAB (args[0], args[1], args[2]));
1290 break;
1292 case INDEX_op_rem_i32:
1293 tcg_out32 (s, DIVW | TAB (0, args[1], args[2]));
1294 tcg_out32 (s, MULLW | TAB (0, 0, args[2]));
1295 tcg_out32 (s, SUBF | TAB (args[0], 0, args[1]));
1296 break;
1298 case INDEX_op_remu_i32:
1299 tcg_out32 (s, DIVWU | TAB (0, args[1], args[2]));
1300 tcg_out32 (s, MULLW | TAB (0, 0, args[2]));
1301 tcg_out32 (s, SUBF | TAB (args[0], 0, args[1]));
1302 break;
1304 case INDEX_op_mulu2_i32:
1305 if (args[0] == args[2] || args[0] == args[3]) {
1306 tcg_out32 (s, MULLW | TAB (0, args[2], args[3]));
1307 tcg_out32 (s, MULHWU | TAB (args[1], args[2], args[3]));
1308 tcg_out_mov (s, args[0], 0);
1310 else {
1311 tcg_out32 (s, MULLW | TAB (args[0], args[2], args[3]));
1312 tcg_out32 (s, MULHWU | TAB (args[1], args[2], args[3]));
1314 break;
1316 case INDEX_op_shl_i32:
1317 if (const_args[2]) {
1318 tcg_out32 (s, (RLWINM
1319 | RA (args[0])
1320 | RS (args[1])
1321 | SH (args[2])
1322 | MB (0)
1323 | ME (31 - args[2])
1327 else
1328 tcg_out32 (s, SLW | SAB (args[1], args[0], args[2]));
1329 break;
1330 case INDEX_op_shr_i32:
1331 if (const_args[2]) {
1332 tcg_out32 (s, (RLWINM
1333 | RA (args[0])
1334 | RS (args[1])
1335 | SH (32 - args[2])
1336 | MB (args[2])
1337 | ME (31)
1341 else
1342 tcg_out32 (s, SRW | SAB (args[1], args[0], args[2]));
1343 break;
1344 case INDEX_op_sar_i32:
1345 if (const_args[2])
1346 tcg_out32 (s, SRAWI | RS (args[1]) | RA (args[0]) | SH (args[2]));
1347 else
1348 tcg_out32 (s, SRAW | SAB (args[1], args[0], args[2]));
1349 break;
1351 case INDEX_op_add2_i32:
1352 if (args[0] == args[3] || args[0] == args[5]) {
1353 tcg_out32 (s, ADDC | TAB (0, args[2], args[4]));
1354 tcg_out32 (s, ADDE | TAB (args[1], args[3], args[5]));
1355 tcg_out_mov (s, args[0], 0);
1357 else {
1358 tcg_out32 (s, ADDC | TAB (args[0], args[2], args[4]));
1359 tcg_out32 (s, ADDE | TAB (args[1], args[3], args[5]));
1361 break;
1362 case INDEX_op_sub2_i32:
1363 if (args[0] == args[3] || args[0] == args[5]) {
1364 tcg_out32 (s, SUBFC | TAB (0, args[4], args[2]));
1365 tcg_out32 (s, SUBFE | TAB (args[1], args[5], args[3]));
1366 tcg_out_mov (s, args[0], 0);
1368 else {
1369 tcg_out32 (s, SUBFC | TAB (args[0], args[4], args[2]));
1370 tcg_out32 (s, SUBFE | TAB (args[1], args[5], args[3]));
1372 break;
1374 case INDEX_op_brcond_i32:
1376 /* args[0] = r0
1377    args[1] = r1
1378    args[2] = cond
1379    args[3] = r1 is const
1380    args[4] = label_index */
1382 tcg_out_brcond (s, args[2], args[0], args[1], const_args[1], args[3]);
1383 break;
1384 case INDEX_op_brcond2_i32:
1385 tcg_out_brcond2(s, args, const_args);
1386 break;
1388 case INDEX_op_neg_i32:
1389 tcg_out32 (s, NEG | RT (args[0]) | RA (args[1]));
1390 break;
1392 case INDEX_op_qemu_ld8u:
1393 tcg_out_qemu_ld(s, args, 0);
1394 break;
1395 case INDEX_op_qemu_ld8s:
1396 tcg_out_qemu_ld(s, args, 0 | 4);
1397 break;
1398 case INDEX_op_qemu_ld16u:
1399 tcg_out_qemu_ld(s, args, 1);
1400 break;
1401 case INDEX_op_qemu_ld16s:
1402 tcg_out_qemu_ld(s, args, 1 | 4);
1403 break;
1404 case INDEX_op_qemu_ld32u:
1405 tcg_out_qemu_ld(s, args, 2);
1406 break;
1407 case INDEX_op_qemu_ld64:
1408 tcg_out_qemu_ld(s, args, 3);
1409 break;
1410 case INDEX_op_qemu_st8:
1411 tcg_out_qemu_st(s, args, 0);
1412 break;
1413 case INDEX_op_qemu_st16:
1414 tcg_out_qemu_st(s, args, 1);
1415 break;
1416 case INDEX_op_qemu_st32:
1417 tcg_out_qemu_st(s, args, 2);
1418 break;
1419 case INDEX_op_qemu_st64:
1420 tcg_out_qemu_st(s, args, 3);
1421 break;
1423 case INDEX_op_ext8s_i32:
1424 tcg_out32 (s, EXTSB | RS (args[1]) | RA (args[0]));
1425 break;
1426 case INDEX_op_ext16s_i32:
1427 tcg_out32 (s, EXTSH | RS (args[1]) | RA (args[0]));
1428 break;
1430 default:
1431 tcg_dump_ops (s, stderr);
1432 tcg_abort ();
/*
 * Operand-constraint table for the 32-bit PPC TCG backend: one entry per
 * supported TCG opcode, giving a constraint string per operand.  "r" is any
 * register and "ri" register-or-immediate; "L", "K" and "M" are
 * backend-specific constraint letters for the qemu_ld/qemu_st helpers
 * (defined elsewhere in this file -- presumably they exclude the registers
 * clobbered by the softmmu helper call; TODO confirm against
 * target_parse_constraint).  The list is terminated by the { -1 } sentinel.
 */
1436 static const TCGTargetOpDef ppc_op_defs[] = {
1437 { INDEX_op_exit_tb, { } },
1438 { INDEX_op_goto_tb, { } },
1439 { INDEX_op_call, { "ri" } },
1440 { INDEX_op_jmp, { "ri" } },
1441 { INDEX_op_br, { } },
1443 { INDEX_op_mov_i32, { "r", "r" } },
1444 { INDEX_op_movi_i32, { "r" } },
1445 { INDEX_op_ld8u_i32, { "r", "r" } },
1446 { INDEX_op_ld8s_i32, { "r", "r" } },
1447 { INDEX_op_ld16u_i32, { "r", "r" } },
1448 { INDEX_op_ld16s_i32, { "r", "r" } },
1449 { INDEX_op_ld_i32, { "r", "r" } },
1450 { INDEX_op_st8_i32, { "r", "r" } },
1451 { INDEX_op_st16_i32, { "r", "r" } },
1452 { INDEX_op_st_i32, { "r", "r" } },
/* div/rem take "r" only: the emitter for rem/remu scratches a fixed
   register (r0), so no immediate forms are offered. */
1454 { INDEX_op_add_i32, { "r", "r", "ri" } },
1455 { INDEX_op_mul_i32, { "r", "r", "ri" } },
1456 { INDEX_op_div_i32, { "r", "r", "r" } },
1457 { INDEX_op_divu_i32, { "r", "r", "r" } },
1458 { INDEX_op_rem_i32, { "r", "r", "r" } },
1459 { INDEX_op_remu_i32, { "r", "r", "r" } },
1460 { INDEX_op_mulu2_i32, { "r", "r", "r", "r" } },
1461 { INDEX_op_sub_i32, { "r", "r", "ri" } },
1462 { INDEX_op_and_i32, { "r", "r", "ri" } },
1463 { INDEX_op_or_i32, { "r", "r", "ri" } },
1464 { INDEX_op_xor_i32, { "r", "r", "ri" } },
1466 { INDEX_op_shl_i32, { "r", "r", "ri" } },
1467 { INDEX_op_shr_i32, { "r", "r", "ri" } },
1468 { INDEX_op_sar_i32, { "r", "r", "ri" } },
1470 { INDEX_op_brcond_i32, { "r", "ri" } },
1472 { INDEX_op_add2_i32, { "r", "r", "r", "r", "r", "r" } },
1473 { INDEX_op_sub2_i32, { "r", "r", "r", "r", "r", "r" } },
1474 { INDEX_op_brcond2_i32, { "r", "r", "r", "r" } },
1476 { INDEX_op_neg_i32, { "r", "r" } },
/* With a 32-bit guest address one host register holds the address; with a
   64-bit guest address on this 32-bit host, each qemu_ld/st takes one extra
   address operand (hence the extra "L"/"K"/"M" column below). */
1478 #if TARGET_LONG_BITS == 32
1479 { INDEX_op_qemu_ld8u, { "r", "L" } },
1480 { INDEX_op_qemu_ld8s, { "r", "L" } },
1481 { INDEX_op_qemu_ld16u, { "r", "L" } },
1482 { INDEX_op_qemu_ld16s, { "r", "L" } },
1483 { INDEX_op_qemu_ld32u, { "r", "L" } },
1484 { INDEX_op_qemu_ld32s, { "r", "L" } },
1485 { INDEX_op_qemu_ld64, { "r", "r", "L" } },
1487 { INDEX_op_qemu_st8, { "K", "K" } },
1488 { INDEX_op_qemu_st16, { "K", "K" } },
1489 { INDEX_op_qemu_st32, { "K", "K" } },
1490 { INDEX_op_qemu_st64, { "M", "M", "M" } },
1491 #else
1492 { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
1493 { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
1494 { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
1495 { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
1496 { INDEX_op_qemu_ld32u, { "r", "L", "L" } },
1497 { INDEX_op_qemu_ld32s, { "r", "L", "L" } },
1498 { INDEX_op_qemu_ld64, { "r", "L", "L", "L" } },
1500 { INDEX_op_qemu_st8, { "K", "K", "K" } },
1501 { INDEX_op_qemu_st16, { "K", "K", "K" } },
1502 { INDEX_op_qemu_st32, { "K", "K", "K" } },
1503 { INDEX_op_qemu_st64, { "M", "M", "M", "M" } },
1504 #endif
1506 { INDEX_op_ext8s_i32, { "r", "r" } },
1507 { INDEX_op_ext16s_i32, { "r", "r" } },
1509 { -1 },
/* NOTE(review): the array's closing "};" (blob line 1510) was dropped by
   the extraction of this chunk -- it must be restored when merging. */
1512 void tcg_target_init(TCGContext *s)
1514 tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
1515 tcg_regset_set32(tcg_target_call_clobber_regs, 0,
1516 (1 << TCG_REG_R0) |
1517 #ifdef __APPLE__
1518 (1 << TCG_REG_R2) |
1519 #endif
1520 (1 << TCG_REG_R3) |
1521 (1 << TCG_REG_R4) |
1522 (1 << TCG_REG_R5) |
1523 (1 << TCG_REG_R6) |
1524 (1 << TCG_REG_R7) |
1525 (1 << TCG_REG_R8) |
1526 (1 << TCG_REG_R9) |
1527 (1 << TCG_REG_R10) |
1528 (1 << TCG_REG_R11) |
1529 (1 << TCG_REG_R12)
1532 tcg_regset_clear(s->reserved_regs);
1533 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);
1534 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);
1535 #ifndef __APPLE__
1536 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2);
1537 #endif
1538 #ifdef __linux__
1539 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13);
1540 #endif
1542 tcg_add_target_add_op_defs(ppc_op_defs);