R13 is reserved for the small data area pointer by the SVR4 PPC ABI
[qemu/mini2440.git] / tcg / ppc / tcg-target.c
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

static uint8_t *tb_ret_addr;

#ifdef __APPLE__
#define LINKAGE_AREA_SIZE 24
#define LR_OFFSET 8
#elif defined _AIX
#define LINKAGE_AREA_SIZE 52
#define LR_OFFSET 8
#else
#define LINKAGE_AREA_SIZE 8
#define LR_OFFSET 4
#endif

#define FAST_PATH
#if TARGET_PHYS_ADDR_BITS <= 32
#define ADDEND_OFFSET 0
#else
#define ADDEND_OFFSET 4
#endif
#ifndef NDEBUG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r0",
    "r1",
    "rp",
    "r3",
    "r4",
    "r5",
    "r6",
    "r7",
    "r8",
    "r9",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
};
#endif
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17,
    TCG_REG_R18,
    TCG_REG_R19,
    TCG_REG_R20,
    TCG_REG_R21,
    TCG_REG_R22,
    TCG_REG_R23,
    TCG_REG_R28,
    TCG_REG_R29,
    TCG_REG_R30,
    TCG_REG_R31,
#ifdef __APPLE__
    TCG_REG_R2,
#endif
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
#ifndef __APPLE__
    TCG_REG_R11,
#endif
    TCG_REG_R12,
#ifndef __linux__
    TCG_REG_R13,
#endif
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R24,
    TCG_REG_R25,
    TCG_REG_R26,
    TCG_REG_R27
};
static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10
};

static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_R3,
    TCG_REG_R4
};

static const int tcg_target_callee_save_regs[] = {
#ifdef __APPLE__
    TCG_REG_R11,
    TCG_REG_R13,
#endif
#ifdef _AIX
    TCG_REG_R13,
#endif
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17,
    TCG_REG_R18,
    TCG_REG_R19,
    TCG_REG_R20,
    TCG_REG_R21,
    TCG_REG_R22,
    TCG_REG_R23,
    TCG_REG_R28,
    TCG_REG_R29,
    TCG_REG_R30,
    TCG_REG_R31
};
static uint32_t reloc_pc24_val (void *pc, tcg_target_long target)
{
    tcg_target_long disp;

    disp = target - (tcg_target_long) pc;
    if ((disp << 6) >> 6 != disp)
        tcg_abort ();

    return disp & 0x3fffffc;
}

static void reloc_pc24 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3fffffc)
        | reloc_pc24_val (pc, target);
}

static uint16_t reloc_pc14_val (void *pc, tcg_target_long target)
{
    tcg_target_long disp;

    disp = target - (tcg_target_long) pc;
    if (disp != (int16_t) disp)
        tcg_abort ();

    return disp & 0xfffc;
}

static void reloc_pc14 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xfffc)
        | reloc_pc14_val (pc, target);
}
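
/* Illustration (not from the original source): a B/BL target must lie
   within +/- 32 MB of the branch, since the LI field is a signed 26-bit,
   word-aligned byte displacement.  The check "(disp << 6) >> 6 != disp"
   verifies that the displacement survives truncation to 26 bits, so
   reloc_pc24_val() for a branch 8 bytes forward returns 0x8, while a
   64 MB displacement aborts.  reloc_pc14_val() is the same idea for the
   16-bit BD field of conditional branches (+/- 32 KB). */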
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch (type) {
    case R_PPC_REL14:
        reloc_pc14 (code_ptr, value);
        break;
    case R_PPC_REL24:
        reloc_pc24 (code_ptr, value);
        break;
    default:
        tcg_abort();
    }
}

/* maximum number of registers used for input function arguments */
static int tcg_target_get_call_iarg_regs_count(int flags)
{
    return ARRAY_SIZE (tcg_target_call_iarg_regs);
}
/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'A': case 'B': case 'C': case 'D':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, 3 + ct_str[0] - 'A');
        break;
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
#ifdef CONFIG_SOFTMMU
    case 'L':                   /* qemu_ld constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
        break;
    case 'K':                   /* qemu_st[8..32] constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
#if TARGET_LONG_BITS == 64
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R6);
#endif
        break;
    case 'M':                   /* qemu_st64 constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R6);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R7);
        break;
#else
    case 'L':
    case 'K':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
    case 'M':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
        break;
#endif
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static int tcg_target_const_match(tcg_target_long val,
                                  const TCGArgConstraint *arg_ct)
{
    int ct;

    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    return 0;
}
#define OPCD(opc) ((opc)<<26)
#define XO31(opc) (OPCD(31)|((opc)<<1))
#define XO19(opc) (OPCD(19)|((opc)<<1))

#define B      OPCD(18)
#define BC     OPCD(16)
#define LBZ    OPCD(34)
#define LHZ    OPCD(40)
#define LHA    OPCD(42)
#define LWZ    OPCD(32)
#define STB    OPCD(38)
#define STH    OPCD(44)
#define STW    OPCD(36)

#define ADDI   OPCD(14)
#define ADDIS  OPCD(15)
#define ORI    OPCD(24)
#define ORIS   OPCD(25)
#define XORI   OPCD(26)
#define XORIS  OPCD(27)
#define ANDI   OPCD(28)
#define ANDIS  OPCD(29)
#define MULLI  OPCD( 7)
#define CMPLI  OPCD(10)
#define CMPI   OPCD(11)

#define LWZU   OPCD(33)
#define STWU   OPCD(37)

#define RLWINM OPCD(21)

#define BCLR   XO19( 16)
#define BCCTR  XO19(528)
#define CRAND  XO19(257)
#define CRANDC XO19(129)
#define CRNAND XO19(225)
#define CROR   XO19(449)

#define EXTSB  XO31(954)
#define EXTSH  XO31(922)
#define ADD    XO31(266)
#define ADDE   XO31(138)
#define ADDC   XO31( 10)
#define AND    XO31( 28)
#define SUBF   XO31( 40)
#define SUBFC  XO31(  8)
#define SUBFE  XO31(136)
#define OR     XO31(444)
#define XOR    XO31(316)
#define MULLW  XO31(235)
#define MULHWU XO31( 11)
#define DIVW   XO31(491)
#define DIVWU  XO31(459)
#define CMP    XO31(  0)
#define CMPL   XO31( 32)
#define LHBRX  XO31(790)
#define LWBRX  XO31(534)
#define STHBRX XO31(918)
#define STWBRX XO31(662)
#define MFSPR  XO31(339)
#define MTSPR  XO31(467)
#define SRAWI  XO31(824)
#define NEG    XO31(104)

#define LBZX   XO31( 87)
#define LHZX   XO31(276)
#define LHAX   XO31(343)
#define LWZX   XO31( 23)
#define STBX   XO31(215)
#define STHX   XO31(407)
#define STWX   XO31(151)

#define SPR(a,b) ((((a)<<5)|(b))<<11)
#define LR     SPR(8, 0)
#define CTR    SPR(9, 0)

#define SLW    XO31( 24)
#define SRW    XO31(536)
#define SRAW   XO31(792)

#define LMW    OPCD(46)
#define STMW   OPCD(47)

#define TW     XO31(4)
#define TRAP   (TW | TO (31))

#define RT(r) ((r)<<21)
#define RS(r) ((r)<<21)
#define RA(r) ((r)<<16)
#define RB(r) ((r)<<11)
#define TO(t) ((t)<<21)
#define SH(s) ((s)<<11)
#define MB(b) ((b)<<6)
#define ME(e) ((e)<<1)
#define BO(o) ((o)<<21)

#define LK    1

#define TAB(t,a,b) (RT(t) | RA(a) | RB(b))
#define SAB(s,a,b) (RS(s) | RA(a) | RB(b))

#define BF(n)    ((n)<<23)
#define BI(n, c) (((c)+((n)*4))<<16)
#define BT(n, c) (((c)+((n)*4))<<21)
#define BA(n, c) (((c)+((n)*4))<<16)
#define BB(n, c) (((c)+((n)*4))<<11)

#define BO_COND_TRUE  BO (12)
#define BO_COND_FALSE BO (4)
#define BO_ALWAYS     BO (20)
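
/* Rough example of how these macros compose into a 32-bit instruction
   word (illustrative, not part of the original file): the X-form
   "add r3,r4,r5" is emitted as

       tcg_out32 (s, ADD | TAB (3, 4, 5));

   OPCD(31) selects the primary opcode, XO31() places the extended opcode
   at shift 1, and TAB() drops the destination into the RT field (<<21)
   and the two source operands into RA (<<16) and RB (<<11). */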

enum {
    CR_LT,
    CR_GT,
    CR_EQ,
    CR_SO
};

static const uint32_t tcg_to_bc[10] = {
    [TCG_COND_EQ]  = BC | BI (7, CR_EQ) | BO_COND_TRUE,
    [TCG_COND_NE]  = BC | BI (7, CR_EQ) | BO_COND_FALSE,
    [TCG_COND_LT]  = BC | BI (7, CR_LT) | BO_COND_TRUE,
    [TCG_COND_GE]  = BC | BI (7, CR_LT) | BO_COND_FALSE,
    [TCG_COND_LE]  = BC | BI (7, CR_GT) | BO_COND_FALSE,
    [TCG_COND_GT]  = BC | BI (7, CR_GT) | BO_COND_TRUE,
    [TCG_COND_LTU] = BC | BI (7, CR_LT) | BO_COND_TRUE,
    [TCG_COND_GEU] = BC | BI (7, CR_LT) | BO_COND_FALSE,
    [TCG_COND_LEU] = BC | BI (7, CR_GT) | BO_COND_FALSE,
    [TCG_COND_GTU] = BC | BI (7, CR_GT) | BO_COND_TRUE,
};
static void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out32 (s, OR | SAB (arg, ret, arg));
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         int ret, tcg_target_long arg)
{
    if (arg == (int16_t) arg)
        tcg_out32 (s, ADDI | RT (ret) | RA (0) | (arg & 0xffff));
    else {
        tcg_out32 (s, ADDIS | RT (ret) | RA (0) | ((arg >> 16) & 0xffff));
        if (arg & 0xffff)
            tcg_out32 (s, ORI | RS (ret) | RA (ret) | (arg & 0xffff));
    }
}
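
/* Example (illustrative): tcg_out_movi (s, TCG_TYPE_I32, 5, 0x12345678)
   emits "lis r5,0x1234" followed by "ori r5,r5,0x5678"; a constant that
   fits in a signed 16-bit immediate, such as -2, collapses to a single
   "li r5,-2" (addi with RA=0, which reads as literal zero). */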

static void tcg_out_ldst (TCGContext *s, int ret, int addr,
                          int offset, int op1, int op2)
{
    if (offset == (int16_t) offset)
        tcg_out32 (s, op1 | RT (ret) | RA (addr) | (offset & 0xffff));
    else {
        tcg_out_movi (s, TCG_TYPE_I32, 0, offset);
        tcg_out32 (s, op2 | RT (ret) | RA (addr) | RB (0));
    }
}

static void tcg_out_b (TCGContext *s, int mask, tcg_target_long target)
{
    tcg_target_long disp;

    disp = target - (tcg_target_long) s->code_ptr;
    if ((disp << 6) >> 6 == disp)
        tcg_out32 (s, B | (disp & 0x3fffffc) | mask);
    else {
        tcg_out_movi (s, TCG_TYPE_I32, 0, (tcg_target_long) target);
        tcg_out32 (s, MTSPR | RS (0) | CTR);
        tcg_out32 (s, BCCTR | BO_ALWAYS | mask);
    }
}

#ifdef _AIX
static void tcg_out_call (TCGContext *s, tcg_target_long arg, int const_arg)
{
    int reg;

    if (const_arg) {
        reg = 2;
        tcg_out_movi (s, TCG_TYPE_I32, reg, arg);
    }
    else reg = arg;

    tcg_out32 (s, LWZ | RT (0) | RA (reg));
    tcg_out32 (s, MTSPR | RS (0) | CTR);
    tcg_out32 (s, LWZ | RT (2) | RA (reg) | 4);
    tcg_out32 (s, BCCTR | BO_ALWAYS | LK);
}
#endif
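
/* tcg_out_call() above receives a pointer to an AIX function descriptor
   rather than a code address: word 0 of the descriptor holds the entry
   point (loaded into r0 and moved to CTR) and word 4 holds the callee's
   TOC pointer, which is reloaded into r2 before the bctrl. */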

#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif

static void tcg_out_qemu_ld (TCGContext *s, const TCGArg *args, int opc)
{
    int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
#ifdef CONFIG_SOFTMMU
    int r2;
    void *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3)
        data_reg2 = *args++;
    else
        data_reg2 = 0;
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;
    s_bits = opc & 3;

#ifdef CONFIG_SOFTMMU
    r0 = 3;
    r1 = 4;
    r2 = 0;

    tcg_out32 (s, (RLWINM
                   | RA (r0)
                   | RS (addr_reg)
                   | SH (32 - (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS))
                   | MB (32 - (CPU_TLB_BITS + CPU_TLB_ENTRY_BITS))
                   | ME (31 - CPU_TLB_ENTRY_BITS)
                   )
        );
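    /* The rlwinm above computes the byte offset of the TLB entry for this
       access: roughly ((addr >> TARGET_PAGE_BITS) & (CPU_TLB_SIZE - 1))
       * sizeof(CPUTLBEntry), done as a single rotate-and-mask so it can
       be added directly to the env pointer (TCG_AREG0) below. */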
    tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (TCG_AREG0));
    tcg_out32 (s, (LWZU
                   | RT (r1)
                   | RA (r0)
                   | offsetof (CPUState, tlb_table[mem_index][0].addr_read)
                   )
        );
    tcg_out32 (s, (RLWINM
                   | RA (r2)
                   | RS (addr_reg)
                   | SH (0)
                   | MB ((32 - s_bits) & 31)
                   | ME (31 - TARGET_PAGE_BITS)
                   )
        );
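    /* The lwzu both loads tlb_table[mem_index][index].addr_read into r1
       and updates r0 to point at that TLB entry.  The second rlwinm
       builds the value compared against that tag: the page-number bits of
       the guest address, with the low bits of the access size kept (the
       mask wraps around), so that a TLB miss, and in effect an unaligned
       access as well, falls through to the slow path. */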

    tcg_out32 (s, CMP | BF (7) | RA (r2) | RB (r1));
#if TARGET_LONG_BITS == 64
    tcg_out32 (s, LWZ | RT (r1) | RA (r0) | 4);
    tcg_out32 (s, CMP | BF (6) | RA (addr_reg2) | RB (r1));
    tcg_out32 (s, CRAND | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
#endif

    label1_ptr = s->code_ptr;
#ifdef FAST_PATH
    tcg_out32 (s, BC | BI (7, CR_EQ) | BO_COND_TRUE);
#endif

    /* slow path */
#if TARGET_LONG_BITS == 32
    tcg_out_mov (s, 3, addr_reg);
    tcg_out_movi (s, TCG_TYPE_I32, 4, mem_index);
#else
    tcg_out_mov (s, 3, addr_reg2);
    tcg_out_mov (s, 4, addr_reg);
    tcg_out_movi (s, TCG_TYPE_I32, 5, mem_index);
#endif

#ifdef _AIX
    tcg_out_call (s, (tcg_target_long) qemu_ld_helpers[s_bits], 1);
#else
    tcg_out_b (s, LK, (tcg_target_long) qemu_ld_helpers[s_bits]);
#endif
    switch (opc) {
    case 0|4:
        tcg_out32 (s, EXTSB | RA (data_reg) | RS (3));
        break;
    case 1|4:
        tcg_out32 (s, EXTSH | RA (data_reg) | RS (3));
        break;
    case 0:
    case 1:
    case 2:
        if (data_reg != 3)
            tcg_out_mov (s, data_reg, 3);
        break;
    case 3:
        if (data_reg == 3) {
            if (data_reg2 == 4) {
                tcg_out_mov (s, 0, 4);
                tcg_out_mov (s, 4, 3);
                tcg_out_mov (s, 3, 0);
            }
            else {
                tcg_out_mov (s, data_reg2, 3);
                tcg_out_mov (s, 3, 4);
            }
        }
        else {
            if (data_reg != 4) tcg_out_mov (s, data_reg, 4);
            if (data_reg2 != 3) tcg_out_mov (s, data_reg2, 3);
        }
        break;
    }
    label2_ptr = s->code_ptr;
    tcg_out32 (s, B);

    /* label1: fast path */
#ifdef FAST_PATH
    reloc_pc14 (label1_ptr, (tcg_target_long) s->code_ptr);
#endif

    /* r0 now contains &env->tlb_table[mem_index][index].addr_read */
    tcg_out32 (s, (LWZ
                   | RT (r0)
                   | RA (r0)
                   | (ADDEND_OFFSET + offsetof (CPUTLBEntry, addend)
                      - offsetof (CPUTLBEntry, addr_read))
                   ));
    /* r0 = env->tlb_table[mem_index][index].addend */
    tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (addr_reg));
    /* r0 = env->tlb_table[mem_index][index].addend + addr */

#else  /* !CONFIG_SOFTMMU */
    r0 = addr_reg;
    r1 = 3;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 0;
#else
    bswap = 1;
#endif
    switch (opc) {
    default:
    case 0:
        tcg_out32 (s, LBZ | RT (data_reg) | RA (r0));
        break;
    case 0|4:
        tcg_out32 (s, LBZ | RT (data_reg) | RA (r0));
        tcg_out32 (s, EXTSB | RA (data_reg) | RS (data_reg));
        break;
    case 1:
        if (bswap) tcg_out32 (s, LHBRX | RT (data_reg) | RB (r0));
        else tcg_out32 (s, LHZ | RT (data_reg) | RA (r0));
        break;
    case 1|4:
        if (bswap) {
            tcg_out32 (s, LHBRX | RT (data_reg) | RB (r0));
            tcg_out32 (s, EXTSH | RA (data_reg) | RS (data_reg));
        }
        else tcg_out32 (s, LHA | RT (data_reg) | RA (r0));
        break;
    case 2:
        if (bswap) tcg_out32 (s, LWBRX | RT (data_reg) | RB (r0));
        else tcg_out32 (s, LWZ | RT (data_reg) | RA (r0));
        break;
    case 3:
        if (bswap) {
            tcg_out32 (s, ADDI | RT (r1) | RA (r0) | 4);
            tcg_out32 (s, LWBRX | RT (data_reg) | RB (r0));
            tcg_out32 (s, LWBRX | RT (data_reg2) | RB (r1));
        }
        else {
            if (r0 == data_reg2) {
                tcg_out32 (s, LWZ | RT (0) | RA (r0));
                tcg_out32 (s, LWZ | RT (data_reg) | RA (r0) | 4);
                tcg_out_mov (s, data_reg2, 0);
            }
            else {
                tcg_out32 (s, LWZ | RT (data_reg2) | RA (r0));
                tcg_out32 (s, LWZ | RT (data_reg) | RA (r0) | 4);
            }
        }
        break;
    }

#ifdef CONFIG_SOFTMMU
    reloc_pc24 (label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}

static void tcg_out_qemu_st (TCGContext *s, const TCGArg *args, int opc)
{
    int addr_reg, r0, r1, data_reg, data_reg2, mem_index, bswap;
#ifdef CONFIG_SOFTMMU
    int r2, ir;
    void *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3)
        data_reg2 = *args++;
    else
        data_reg2 = 0;
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;

#ifdef CONFIG_SOFTMMU
    r0 = 3;
    r1 = 4;
    r2 = 0;

    tcg_out32 (s, (RLWINM
                   | RA (r0)
                   | RS (addr_reg)
                   | SH (32 - (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS))
                   | MB (32 - (CPU_TLB_ENTRY_BITS + CPU_TLB_BITS))
                   | ME (31 - CPU_TLB_ENTRY_BITS)
                   )
        );
    tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (TCG_AREG0));
    tcg_out32 (s, (LWZU
                   | RT (r1)
                   | RA (r0)
                   | offsetof (CPUState, tlb_table[mem_index][0].addr_write)
                   )
        );
    tcg_out32 (s, (RLWINM
                   | RA (r2)
                   | RS (addr_reg)
                   | SH (0)
                   | MB ((32 - opc) & 31)
                   | ME (31 - TARGET_PAGE_BITS)
                   )
        );

    tcg_out32 (s, CMP | (7 << 23) | RA (r2) | RB (r1));
#if TARGET_LONG_BITS == 64
    tcg_out32 (s, LWZ | RT (r1) | RA (r0) | 4);
    tcg_out32 (s, CMP | BF (6) | RA (addr_reg2) | RB (r1));
    tcg_out32 (s, CRAND | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
#endif

    label1_ptr = s->code_ptr;
#ifdef FAST_PATH
    tcg_out32 (s, BC | BI (7, CR_EQ) | BO_COND_TRUE);
#endif

    /* slow path */
#if TARGET_LONG_BITS == 32
    tcg_out_mov (s, 3, addr_reg);
    ir = 4;
#else
    tcg_out_mov (s, 3, addr_reg2);
    tcg_out_mov (s, 4, addr_reg);
#ifdef TCG_TARGET_CALL_ALIGN_ARGS
    ir = 5;
#else
    ir = 4;
#endif
#endif

    switch (opc) {
    case 0:
        tcg_out32 (s, (RLWINM
                       | RA (ir)
                       | RS (data_reg)
                       | SH (0)
                       | MB (24)
                       | ME (31)));
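        /* rlwinm with SH=0, MB=24, ME=31 is just a mask: it zero-extends
           the low 8 bits of data_reg into the argument register, the
           equivalent of "andi. ir,data,0xff" without touching CR0.  The
           16-bit case below does the same with MB=16. */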
        break;
    case 1:
        tcg_out32 (s, (RLWINM
                       | RA (ir)
                       | RS (data_reg)
                       | SH (0)
                       | MB (16)
                       | ME (31)));
        break;
    case 2:
        tcg_out_mov (s, ir, data_reg);
        break;
    case 3:
#ifdef TCG_TARGET_CALL_ALIGN_ARGS
        ir = 5;
#endif
        tcg_out_mov (s, ir++, data_reg2);
        tcg_out_mov (s, ir, data_reg);
        break;
    }
    ir++;

    tcg_out_movi (s, TCG_TYPE_I32, ir, mem_index);
#ifdef _AIX
    tcg_out_call (s, (tcg_target_long) qemu_st_helpers[opc], 1);
#else
    tcg_out_b (s, LK, (tcg_target_long) qemu_st_helpers[opc]);
#endif
    label2_ptr = s->code_ptr;
    tcg_out32 (s, B);

    /* label1: fast path */
#ifdef FAST_PATH
    reloc_pc14 (label1_ptr, (tcg_target_long) s->code_ptr);
#endif

    tcg_out32 (s, (LWZ
                   | RT (r0)
                   | RA (r0)
                   | (ADDEND_OFFSET + offsetof (CPUTLBEntry, addend)
                      - offsetof (CPUTLBEntry, addr_write))
                   ));
    /* r0 = env->tlb_table[mem_index][index].addend */
    tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (addr_reg));
    /* r0 = env->tlb_table[mem_index][index].addend + addr */

#else  /* !CONFIG_SOFTMMU */
    r1 = 3;
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 0;
#else
    bswap = 1;
#endif
    switch (opc) {
    case 0:
        tcg_out32 (s, STB | RS (data_reg) | RA (r0));
        break;
    case 1:
        if (bswap) tcg_out32 (s, STHBRX | RS (data_reg) | RA (0) | RB (r0));
        else tcg_out32 (s, STH | RS (data_reg) | RA (r0));
        break;
    case 2:
        if (bswap) tcg_out32 (s, STWBRX | RS (data_reg) | RA (0) | RB (r0));
        else tcg_out32 (s, STW | RS (data_reg) | RA (r0));
        break;
    case 3:
        if (bswap) {
            tcg_out32 (s, ADDI | RT (r1) | RA (r0) | 4);
            tcg_out32 (s, STWBRX | RS (data_reg) | RA (0) | RB (r0));
            tcg_out32 (s, STWBRX | RS (data_reg2) | RA (0) | RB (r1));
        }
        else {
            tcg_out32 (s, STW | RS (data_reg2) | RA (r0));
            tcg_out32 (s, STW | RS (data_reg) | RA (r0) | 4);
        }
        break;
    }

#ifdef CONFIG_SOFTMMU
    reloc_pc24 (label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}

void tcg_target_qemu_prologue (TCGContext *s)
{
    int i, frame_size;

    frame_size = 0
        + LINKAGE_AREA_SIZE
        + TCG_STATIC_CALL_ARGS_SIZE
        + ARRAY_SIZE (tcg_target_callee_save_regs) * 4
        ;
    frame_size = (frame_size + 15) & ~15;

#ifdef _AIX
    {
        uint32_t addr;

        /* First emit adhoc function descriptor */
        addr = (uint32_t) s->code_ptr + 12;
        tcg_out32 (s, addr);        /* entry point */
        s->code_ptr += 8;           /* skip TOC and environment pointer */
    }
#endif
    tcg_out32 (s, MFSPR | RT (0) | LR);
    tcg_out32 (s, STWU | RS (1) | RA (1) | (-frame_size & 0xffff));
    for (i = 0; i < ARRAY_SIZE (tcg_target_callee_save_regs); ++i)
        tcg_out32 (s, (STW
                       | RS (tcg_target_callee_save_regs[i])
                       | RA (1)
                       | (i * 4 + LINKAGE_AREA_SIZE + TCG_STATIC_CALL_ARGS_SIZE)
                       )
            );
    tcg_out32 (s, STW | RS (0) | RA (1) | (frame_size + LR_OFFSET));

    tcg_out32 (s, MTSPR | RS (3) | CTR);
    tcg_out32 (s, BCCTR | BO_ALWAYS);
    tb_ret_addr = s->code_ptr;

    for (i = 0; i < ARRAY_SIZE (tcg_target_callee_save_regs); ++i)
        tcg_out32 (s, (LWZ
                       | RT (tcg_target_callee_save_regs[i])
                       | RA (1)
                       | (i * 4 + LINKAGE_AREA_SIZE + TCG_STATIC_CALL_ARGS_SIZE)
                       )
            );
    tcg_out32 (s, LWZ | RT (0) | RA (1) | (frame_size + LR_OFFSET));
    tcg_out32 (s, MTSPR | RS (0) | LR);
    tcg_out32 (s, ADDI | RT (1) | RA (1) | frame_size);
    tcg_out32 (s, BCLR | BO_ALWAYS);
}
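
/* Sketch of the frame built by tcg_target_qemu_prologue() above
   (assuming the usual SysV layout; the Darwin and AIX linkage areas are
   larger but follow the same pattern):

       sp + frame_size + LR_OFFSET      saved LR (in the caller's frame)
       sp + LINKAGE_AREA_SIZE
          + TCG_STATIC_CALL_ARGS_SIZE   callee-saved GPRs, 4 bytes each
       sp + LINKAGE_AREA_SIZE           outgoing call-argument area
       sp                               back chain stored by the stwu

   frame_size is rounded up to a 16-byte multiple as the ABIs require.
   The prologue then jumps to the translated-code pointer passed in r3;
   everything after tb_ret_addr is the common epilogue that every TB
   returns through. */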

static void tcg_out_ld (TCGContext *s, TCGType type, int ret, int arg1,
                        tcg_target_long arg2)
{
    tcg_out_ldst (s, ret, arg1, arg2, LWZ, LWZX);
}

static void tcg_out_st (TCGContext *s, TCGType type, int arg, int arg1,
                        tcg_target_long arg2)
{
    tcg_out_ldst (s, arg, arg1, arg2, STW, STWX);
}

static void ppc_addi (TCGContext *s, int rt, int ra, tcg_target_long si)
{
    if (!si && rt == ra)
        return;

    if (si == (int16_t) si)
        tcg_out32 (s, ADDI | RT (rt) | RA (ra) | (si & 0xffff));
    else {
        uint16_t h = ((si >> 16) & 0xffff) + ((uint16_t) si >> 15);
        tcg_out32 (s, ADDIS | RT (rt) | RA (ra) | h);
        tcg_out32 (s, ADDI | RT (rt) | RA (rt) | (si & 0xffff));
    }
}
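
/* The "+ ((uint16_t) si >> 15)" term in ppc_addi() compensates for the
   sign extension of the trailing addi.  Worked example: si = 0x00018000.
   The low half 0x8000 is added as -0x8000, so the high half must become
   0x0002 rather than 0x0001; addis then contributes 0x00020000, and
   0x00020000 - 0x8000 gives the intended 0x00018000. */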

static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    ppc_addi (s, reg, reg, val);
}

static void tcg_out_cmp (TCGContext *s, int cond, TCGArg arg1, TCGArg arg2,
                         int const_arg2, int cr)
{
    int imm;
    uint32_t op;

    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        if (const_arg2) {
            if ((int16_t) arg2 == arg2) {
                op = CMPI;
                imm = 1;
                break;
            }
            else if ((uint16_t) arg2 == arg2) {
                op = CMPLI;
                imm = 1;
                break;
            }
        }
        op = CMPL;
        imm = 0;
        break;

    case TCG_COND_LT:
    case TCG_COND_GE:
    case TCG_COND_LE:
    case TCG_COND_GT:
        if (const_arg2) {
            if ((int16_t) arg2 == arg2) {
                op = CMPI;
                imm = 1;
                break;
            }
        }
        op = CMP;
        imm = 0;
        break;

    case TCG_COND_LTU:
    case TCG_COND_GEU:
    case TCG_COND_LEU:
    case TCG_COND_GTU:
        if (const_arg2) {
            if ((uint16_t) arg2 == arg2) {
                op = CMPLI;
                imm = 1;
                break;
            }
        }
        op = CMPL;
        imm = 0;
        break;

    default:
        tcg_abort ();
    }
    op |= BF (cr);

    if (imm)
        tcg_out32 (s, op | RA (arg1) | (arg2 & 0xffff));
    else {
        if (const_arg2) {
            tcg_out_movi (s, TCG_TYPE_I32, 0, arg2);
            tcg_out32 (s, op | RA (arg1) | RB (0));
        }
        else
            tcg_out32 (s, op | RA (arg1) | RB (arg2));
    }
}

static void tcg_out_bc (TCGContext *s, int bc, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value)
        tcg_out32 (s, bc | reloc_pc14_val (s->code_ptr, l->u.value));
    else {
        uint16_t val = *(uint16_t *) &s->code_ptr[2];

        /* Thanks to Andrzej Zaborowski */
        tcg_out32 (s, bc | (val & 0xfffc));
        tcg_out_reloc (s, s->code_ptr - 4, R_PPC_REL14, label_index, 0);
    }
}

static void tcg_out_brcond (TCGContext *s, int cond,
                            TCGArg arg1, TCGArg arg2, int const_arg2,
                            int label_index)
{
    tcg_out_cmp (s, cond, arg1, arg2, const_arg2, 7);
    tcg_out_bc (s, tcg_to_bc[cond], label_index);
}

/* XXX: we implement it at the target level to avoid having to
   handle cross basic blocks temporaries */
static void tcg_out_brcond2 (TCGContext *s, const TCGArg *args,
                             const int *const_args)
{
    int cond = args[4], label_index = args[5], op;
    struct { int bit1; int bit2; int cond2; } bits[] = {
        [TCG_COND_LT ] = { CR_LT, CR_LT, TCG_COND_LT  },
        [TCG_COND_LE ] = { CR_LT, CR_GT, TCG_COND_LT  },
        [TCG_COND_GT ] = { CR_GT, CR_GT, TCG_COND_GT  },
        [TCG_COND_GE ] = { CR_GT, CR_LT, TCG_COND_GT  },
        [TCG_COND_LTU] = { CR_LT, CR_LT, TCG_COND_LTU },
        [TCG_COND_LEU] = { CR_LT, CR_GT, TCG_COND_LTU },
        [TCG_COND_GTU] = { CR_GT, CR_GT, TCG_COND_GTU },
        [TCG_COND_GEU] = { CR_GT, CR_LT, TCG_COND_GTU },
    }, *b = &bits[cond];
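
    /* The double-word branch is decided entirely in the condition
       register: for the ordered cases, CR5 gets the strict comparison of
       the high words, CR6 their equality and CR7 the comparison of the
       low words; crandc/crand computes "high words equal AND low-word
       test", cror then ORs in "high word already decisive", and the
       final bc tests the accumulated CR7[EQ] bit. */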
    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        op = (cond == TCG_COND_EQ) ? CRAND : CRNAND;
        tcg_out_cmp (s, cond, args[0], args[2], const_args[2], 6);
        tcg_out_cmp (s, cond, args[1], args[3], const_args[3], 7);
        tcg_out32 (s, op | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
        break;
    case TCG_COND_LT:
    case TCG_COND_LE:
    case TCG_COND_GT:
    case TCG_COND_GE:
    case TCG_COND_LTU:
    case TCG_COND_LEU:
    case TCG_COND_GTU:
    case TCG_COND_GEU:
        op = (b->bit1 != b->bit2) ? CRANDC : CRAND;
        tcg_out_cmp (s, b->cond2, args[1], args[3], const_args[3], 5);
        tcg_out_cmp (s, TCG_COND_EQ, args[1], args[3], const_args[3], 6);
        tcg_out_cmp (s, cond, args[0], args[2], const_args[2], 7);
        tcg_out32 (s, op | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, b->bit2));
        tcg_out32 (s, CROR | BT (7, CR_EQ) | BA (5, b->bit1) | BB (7, CR_EQ));
        break;
    default:
        tcg_abort();
    }

    tcg_out_bc (s, (BC | BI (7, CR_EQ) | BO_COND_TRUE), label_index);
}

void ppc_tb_set_jmp_target (unsigned long jmp_addr, unsigned long addr)
{
    uint32_t *ptr;
    long disp = addr - jmp_addr;
    unsigned long patch_size;

    ptr = (uint32_t *)jmp_addr;

    if ((disp << 6) >> 6 != disp) {
        ptr[0] = 0x3c000000 | (addr >> 16);    /* lis 0,addr@h */
        ptr[1] = 0x60000000 | (addr & 0xffff); /* ori 0,0,addr@l */
        ptr[2] = 0x7c0903a6;                   /* mtctr 0 */
        ptr[3] = 0x4e800420;                   /* bctr */
        patch_size = 16;
    } else {
        /* patch the branch destination */
        if (disp != 16) {
            *ptr = 0x48000000 | (disp & 0x03fffffc); /* b disp */
            patch_size = 4;
        } else {
            ptr[0] = 0x60000000; /* nop */
            ptr[1] = 0x60000000;
            ptr[2] = 0x60000000;
            ptr[3] = 0x60000000;
            patch_size = 16;
        }
    }
    /* flush icache */
    flush_icache_range(jmp_addr, jmp_addr + patch_size);
}
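
/* ppc_tb_set_jmp_target() patches the 16-byte slot that the
   INDEX_op_goto_tb case below reserves with "s->code_ptr += 16": a far
   target becomes lis/ori/mtctr/bctr, a near one a single direct branch,
   and the special case disp == 16 (the target is the instruction right
   after the slot) is filled with nops so execution simply falls
   through. */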

static void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                       const int *const_args)
{
    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi (s, TCG_TYPE_I32, TCG_REG_R3, args[0]);
        tcg_out_b (s, 0, (tcg_target_long) tb_ret_addr);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */

            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            s->code_ptr += 16;
        }
        else {
            tcg_abort ();
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_br:
        {
            TCGLabel *l = &s->labels[args[0]];

            if (l->has_value) {
                tcg_out_b (s, 0, l->u.value);
            }
            else {
                uint32_t val = *(uint32_t *) s->code_ptr;

                /* Thanks to Andrzej Zaborowski */
                tcg_out32 (s, B | (val & 0x3fffffc));
                tcg_out_reloc (s, s->code_ptr - 4, R_PPC_REL24, args[0], 0);
            }
        }
        break;
    case INDEX_op_call:
#ifdef _AIX
        tcg_out_call (s, args[0], const_args[0]);
#else
        if (const_args[0]) {
            tcg_out_b (s, LK, args[0]);
        }
        else {
            tcg_out32 (s, MTSPR | RS (args[0]) | LR);
            tcg_out32 (s, BCLR | BO_ALWAYS | LK);
        }
#endif
        break;
    case INDEX_op_jmp:
        if (const_args[0]) {
            tcg_out_b (s, 0, args[0]);
        }
        else {
            tcg_out32 (s, MTSPR | RS (args[0]) | CTR);
            tcg_out32 (s, BCCTR | BO_ALWAYS);
        }
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
        break;
    case INDEX_op_ld8u_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX);
        break;
    case INDEX_op_ld8s_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX);
        tcg_out32 (s, EXTSB | RS (args[0]) | RA (args[0]));
        break;
    case INDEX_op_ld16u_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LHZ, LHZX);
        break;
    case INDEX_op_ld16s_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LHA, LHAX);
        break;
    case INDEX_op_ld_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LWZ, LWZX);
        break;
    case INDEX_op_st8_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], STB, STBX);
        break;
    case INDEX_op_st16_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], STH, STHX);
        break;
    case INDEX_op_st_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], STW, STWX);
        break;

    case INDEX_op_add_i32:
        if (const_args[2])
            ppc_addi (s, args[0], args[1], args[2]);
        else
            tcg_out32 (s, ADD | TAB (args[0], args[1], args[2]));
        break;
    case INDEX_op_sub_i32:
        if (const_args[2])
            ppc_addi (s, args[0], args[1], -args[2]);
        else
            tcg_out32 (s, SUBF | TAB (args[0], args[2], args[1]));
        break;

    case INDEX_op_and_i32:
        if (const_args[2]) {
            if ((args[2] & 0xffff) == args[2])
                tcg_out32 (s, ANDI | RS (args[1]) | RA (args[0]) | args[2]);
            else if ((args[2] & 0xffff0000) == args[2])
                tcg_out32 (s, ANDIS | RS (args[1]) | RA (args[0])
                           | ((args[2] >> 16) & 0xffff));
            else {
                tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
                tcg_out32 (s, AND | SAB (args[1], args[0], 0));
            }
        }
        else
            tcg_out32 (s, AND | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_or_i32:
        if (const_args[2]) {
            if (args[2] & 0xffff) {
                tcg_out32 (s, ORI | RS (args[1]) | RA (args[0])
                           | (args[2] & 0xffff));
                if (args[2] >> 16)
                    tcg_out32 (s, ORIS | RS (args[0]) | RA (args[0])
                               | ((args[2] >> 16) & 0xffff));
            }
            else {
                tcg_out32 (s, ORIS | RS (args[1]) | RA (args[0])
                           | ((args[2] >> 16) & 0xffff));
            }
        }
        else
            tcg_out32 (s, OR | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_xor_i32:
        if (const_args[2]) {
            if ((args[2] & 0xffff) == args[2])
                tcg_out32 (s, XORI | RS (args[1]) | RA (args[0])
                           | (args[2] & 0xffff));
            else if ((args[2] & 0xffff0000) == args[2])
                tcg_out32 (s, XORIS | RS (args[1]) | RA (args[0])
                           | ((args[2] >> 16) & 0xffff));
            else {
                tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
                tcg_out32 (s, XOR | SAB (args[1], args[0], 0));
            }
        }
        else
            tcg_out32 (s, XOR | SAB (args[1], args[0], args[2]));
        break;

    case INDEX_op_mul_i32:
        if (const_args[2]) {
            if (args[2] == (int16_t) args[2])
                tcg_out32 (s, MULLI | RT (args[0]) | RA (args[1])
                           | (args[2] & 0xffff));
            else {
                tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
                tcg_out32 (s, MULLW | TAB (args[0], args[1], 0));
            }
        }
        else
            tcg_out32 (s, MULLW | TAB (args[0], args[1], args[2]));
        break;

    case INDEX_op_div_i32:
        tcg_out32 (s, DIVW | TAB (args[0], args[1], args[2]));
        break;

    case INDEX_op_divu_i32:
        tcg_out32 (s, DIVWU | TAB (args[0], args[1], args[2]));
        break;

    case INDEX_op_rem_i32:
        tcg_out32 (s, DIVW | TAB (0, args[1], args[2]));
        tcg_out32 (s, MULLW | TAB (0, 0, args[2]));
        tcg_out32 (s, SUBF | TAB (args[0], 0, args[1]));
        break;

    case INDEX_op_remu_i32:
        tcg_out32 (s, DIVWU | TAB (0, args[1], args[2]));
        tcg_out32 (s, MULLW | TAB (0, 0, args[2]));
        tcg_out32 (s, SUBF | TAB (args[0], 0, args[1]));
        break;

    case INDEX_op_mulu2_i32:
        if (args[0] == args[2] || args[0] == args[3]) {
            tcg_out32 (s, MULLW | TAB (0, args[2], args[3]));
            tcg_out32 (s, MULHWU | TAB (args[1], args[2], args[3]));
            tcg_out_mov (s, args[0], 0);
        }
        else {
            tcg_out32 (s, MULLW | TAB (args[0], args[2], args[3]));
            tcg_out32 (s, MULHWU | TAB (args[1], args[2], args[3]));
        }
        break;

    case INDEX_op_shl_i32:
        if (const_args[2]) {
            tcg_out32 (s, (RLWINM
                           | RA (args[0])
                           | RS (args[1])
                           | SH (args[2])
                           | MB (0)
                           | ME (31 - args[2])
                           )
                );
        }
        else
            tcg_out32 (s, SLW | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_shr_i32:
        if (const_args[2]) {
            tcg_out32 (s, (RLWINM
                           | RA (args[0])
                           | RS (args[1])
                           | SH (32 - args[2])
                           | MB (args[2])
                           | ME (31)
                           )
                );
        }
        else
            tcg_out32 (s, SRW | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_sar_i32:
        if (const_args[2])
            tcg_out32 (s, SRAWI | RS (args[1]) | RA (args[0]) | SH (args[2]));
        else
            tcg_out32 (s, SRAW | SAB (args[1], args[0], args[2]));
        break;

    case INDEX_op_add2_i32:
        if (args[0] == args[3] || args[0] == args[5]) {
            tcg_out32 (s, ADDC | TAB (0, args[2], args[4]));
            tcg_out32 (s, ADDE | TAB (args[1], args[3], args[5]));
            tcg_out_mov (s, args[0], 0);
        }
        else {
            tcg_out32 (s, ADDC | TAB (args[0], args[2], args[4]));
            tcg_out32 (s, ADDE | TAB (args[1], args[3], args[5]));
        }
        break;
    case INDEX_op_sub2_i32:
        if (args[0] == args[3] || args[0] == args[5]) {
            tcg_out32 (s, SUBFC | TAB (0, args[4], args[2]));
            tcg_out32 (s, SUBFE | TAB (args[1], args[5], args[3]));
            tcg_out_mov (s, args[0], 0);
        }
        else {
            tcg_out32 (s, SUBFC | TAB (args[0], args[4], args[2]));
            tcg_out32 (s, SUBFE | TAB (args[1], args[5], args[3]));
        }
        break;

    case INDEX_op_brcond_i32:
        /*
          args[0] = r0
          args[1] = r1 (constant if const_args[1] is set)
          args[2] = cond
          args[3] = label_index
        */
        tcg_out_brcond (s, args[2], args[0], args[1], const_args[1], args[3]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args, const_args);
        break;

    case INDEX_op_neg_i32:
        tcg_out32 (s, NEG | RT (args[0]) | RA (args[1]));
        break;

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    case INDEX_op_ext8s_i32:
        tcg_out32 (s, EXTSB | RS (args[1]) | RA (args[0]));
        break;
    case INDEX_op_ext16s_i32:
        tcg_out32 (s, EXTSH | RS (args[1]) | RA (args[0]));
        break;

    default:
        tcg_dump_ops (s, stderr);
        tcg_abort ();
    }
}

static const TCGTargetOpDef ppc_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "r", "ri" } },
    { INDEX_op_mul_i32, { "r", "r", "ri" } },
    { INDEX_op_div_i32, { "r", "r", "r" } },
    { INDEX_op_divu_i32, { "r", "r", "r" } },
    { INDEX_op_rem_i32, { "r", "r", "r" } },
    { INDEX_op_remu_i32, { "r", "r", "r" } },
    { INDEX_op_mulu2_i32, { "r", "r", "r", "r" } },
    { INDEX_op_sub_i32, { "r", "r", "ri" } },
    { INDEX_op_and_i32, { "r", "r", "ri" } },
    { INDEX_op_or_i32, { "r", "r", "ri" } },
    { INDEX_op_xor_i32, { "r", "r", "ri" } },

    { INDEX_op_shl_i32, { "r", "r", "ri" } },
    { INDEX_op_shr_i32, { "r", "r", "ri" } },
    { INDEX_op_sar_i32, { "r", "r", "ri" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_add2_i32, { "r", "r", "r", "r", "r", "r" } },
    { INDEX_op_sub2_i32, { "r", "r", "r", "r", "r", "r" } },
    { INDEX_op_brcond2_i32, { "r", "r", "r", "r" } },

    { INDEX_op_neg_i32, { "r", "r" } },

#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L" } },

    { INDEX_op_qemu_st8, { "K", "K" } },
    { INDEX_op_qemu_st16, { "K", "K" } },
    { INDEX_op_qemu_st32, { "K", "K" } },
    { INDEX_op_qemu_st64, { "M", "M", "M" } },
#else
    { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L", "L", "L" } },

    { INDEX_op_qemu_st8, { "K", "K", "K" } },
    { INDEX_op_qemu_st16, { "K", "K", "K" } },
    { INDEX_op_qemu_st32, { "K", "K", "K" } },
    { INDEX_op_qemu_st64, { "M", "M", "M", "M" } },
#endif

    { INDEX_op_ext8s_i32, { "r", "r" } },
    { INDEX_op_ext16s_i32, { "r", "r" } },

    { -1 },
};

void tcg_target_init(TCGContext *s)
{
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_R0) |
#ifdef __APPLE__
                     (1 << TCG_REG_R2) |
#endif
                     (1 << TCG_REG_R3) |
                     (1 << TCG_REG_R4) |
                     (1 << TCG_REG_R5) |
                     (1 << TCG_REG_R6) |
                     (1 << TCG_REG_R7) |
                     (1 << TCG_REG_R8) |
                     (1 << TCG_REG_R9) |
                     (1 << TCG_REG_R10) |
                     (1 << TCG_REG_R11) |
                     (1 << TCG_REG_R12)
        );

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);
#ifndef __APPLE__
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2);
#endif
#ifdef __linux__
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13);
#endif

    tcg_add_target_add_op_defs(ppc_op_defs);
}