Silence gcc warning about constant overflow
[qemu/mini2440.git] / tcg / ppc / tcg-target.c
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

static uint8_t *tb_ret_addr;

#ifdef __APPLE__
#define LINKAGE_AREA_SIZE 24
#define BACK_CHAIN_OFFSET 8
#else
#define LINKAGE_AREA_SIZE 8
#define BACK_CHAIN_OFFSET 4
#endif
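
/* Stack frame linkage area: the Darwin 32-bit PowerPC ABI reserves a
   24-byte linkage area at the bottom of every frame and saves LR at
   offset 8 of the caller's frame, while the SysV PPC32 ABI needs only
   8 bytes with the LR save word at offset 4.  BACK_CHAIN_OFFSET is the
   LR save slot offset used by tcg_target_qemu_prologue() below. */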

#define FAST_PATH
#if TARGET_PHYS_ADDR_BITS <= 32
#define ADDEND_OFFSET 0
#else
#define ADDEND_OFFSET 4
#endif
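
/* With more than 32 physical address bits CPUTLBEntry.addend becomes a
   64-bit field; on this 32-bit big-endian host its low word presumably
   sits 4 bytes in, which is what ADDEND_OFFSET selects when the TLB
   fast paths below load the addend. */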

static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r0",
    "r1",
    "rp",
    "r3",
    "r4",
    "r5",
    "r6",
    "r7",
    "r8",
    "r9",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
};

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17,
    TCG_REG_R18,
    TCG_REG_R19,
    TCG_REG_R20,
    TCG_REG_R21,
    TCG_REG_R22,
    TCG_REG_R23,
    TCG_REG_R28,
    TCG_REG_R29,
    TCG_REG_R30,
    TCG_REG_R31,
#ifdef __APPLE__
    TCG_REG_R2,
#endif
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
#ifndef __APPLE__
    TCG_REG_R11,
#endif
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R24,
    TCG_REG_R25,
    TCG_REG_R26,
    TCG_REG_R27
};
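
/* Both the SysV and Darwin 32-bit PPC calling conventions pass the first
   eight integer arguments in r3-r10 and return results in r3 (r3:r4 for
   64-bit values), which is what the two arrays below encode. */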

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10
};

static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_R3,
    TCG_REG_R4
};

static const int tcg_target_callee_save_regs[] = {
#ifdef __APPLE__
    TCG_REG_R11,
    TCG_REG_R13,
#endif
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17,
    TCG_REG_R18,
    TCG_REG_R19,
    TCG_REG_R20,
    TCG_REG_R21,
    TCG_REG_R22,
    TCG_REG_R23,
    TCG_REG_R28,
    TCG_REG_R29,
    TCG_REG_R30,
    TCG_REG_R31
};
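
/* Branch relocations: an I-form branch (b/bl) carries a 26-bit signed
   byte displacement in the field masked by 0x3fffffc, and a B-form
   conditional branch (bc) a 16-bit signed displacement with the low two
   bits clear (mask 0xfffc).  The helpers below compute those fields from
   a pc/target pair and abort if the displacement does not fit. */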

static uint32_t reloc_pc24_val (void *pc, tcg_target_long target)
{
    tcg_target_long disp;

    disp = target - (tcg_target_long) pc;
    if ((disp << 6) >> 6 != disp)
        tcg_abort ();

    return disp & 0x3fffffc;
}

static void reloc_pc24 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3fffffc)
        | reloc_pc24_val (pc, target);
}

static uint16_t reloc_pc14_val (void *pc, tcg_target_long target)
{
    tcg_target_long disp;

    disp = target - (tcg_target_long) pc;
    if (disp != (int16_t) disp)
        tcg_abort ();

    return disp & 0xfffc;
}

static void reloc_pc14 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xfffc)
        | reloc_pc14_val (pc, target);
}

static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch (type) {
    case R_PPC_REL14:
        reloc_pc14 (code_ptr, value);
        break;
    case R_PPC_REL24:
        reloc_pc24 (code_ptr, value);
        break;
    default:
        tcg_abort();
    }
}

/* maximum number of registers used for input function arguments */
static int tcg_target_get_call_iarg_regs_count(int flags)
{
    return sizeof (tcg_target_call_iarg_regs) / sizeof (tcg_target_call_iarg_regs[0]);
}

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'A': case 'B': case 'C': case 'D':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, 3 + ct_str[0] - 'A');
        break;
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
#ifdef CONFIG_SOFTMMU
    case 'L':                   /* qemu_ld constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
        break;
    case 'K':                   /* qemu_st[8..32] constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
#if TARGET_LONG_BITS == 64
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R6);
#endif
        break;
    case 'M':                   /* qemu_st64 constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R6);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R7);
        break;
#else
    case 'L':
    case 'K':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
    case 'M':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
        break;
#endif
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static int tcg_target_const_match(tcg_target_long val,
                                  const TCGArgConstraint *arg_ct)
{
    int ct;

    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    return 0;
}

#define OPCD(opc) ((opc)<<26)
#define XO31(opc) (OPCD(31)|((opc)<<1))
#define XO19(opc) (OPCD(19)|((opc)<<1))
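
/* PPC instructions are 32 bits wide: the 6-bit primary opcode occupies
   the top bits (hence OPCD's << 26), and the X/XO-form instructions
   under primary opcodes 31 and 19 carry a 10-bit extended opcode just
   above the low Rc/LK bit (hence the << 1).  The RT/RA/RB/... macros
   further down shift operands into their fields, so for example
       ADD | RT (3) | RA (4) | RB (5)
   is 0x7c642a14, i.e. "add r3,r4,r5" (illustration only; this particular
   word is not emitted anywhere below). */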

#define B      OPCD(18)
#define BC     OPCD(16)
#define LBZ    OPCD(34)
#define LHZ    OPCD(40)
#define LHA    OPCD(42)
#define LWZ    OPCD(32)
#define STB    OPCD(38)
#define STH    OPCD(44)
#define STW    OPCD(36)

#define ADDI   OPCD(14)
#define ADDIS  OPCD(15)
#define ORI    OPCD(24)
#define ORIS   OPCD(25)
#define XORI   OPCD(26)
#define XORIS  OPCD(27)
#define ANDI   OPCD(28)
#define ANDIS  OPCD(29)
#define MULLI  OPCD( 7)
#define CMPLI  OPCD(10)
#define CMPI   OPCD(11)

#define LWZU   OPCD(33)
#define STWU   OPCD(37)

#define RLWINM OPCD(21)

#define BCLR   XO19( 16)
#define BCCTR  XO19(528)
#define CRAND  XO19(257)
#define CRANDC XO19(129)
#define CRNAND XO19(225)
#define CROR   XO19(449)

#define EXTSB  XO31(954)
#define EXTSH  XO31(922)
#define ADD    XO31(266)
#define ADDE   XO31(138)
#define ADDC   XO31( 10)
#define AND    XO31( 28)
#define SUBF   XO31( 40)
#define SUBFC  XO31(  8)
#define SUBFE  XO31(136)
#define OR     XO31(444)
#define XOR    XO31(316)
#define MULLW  XO31(235)
#define MULHWU XO31( 11)
#define DIVW   XO31(491)
#define DIVWU  XO31(459)
#define CMP    XO31(  0)
#define CMPL   XO31( 32)
#define LHBRX  XO31(790)
#define LWBRX  XO31(534)
#define STHBRX XO31(918)
#define STWBRX XO31(662)
#define MFSPR  XO31(339)
#define MTSPR  XO31(467)
#define SRAWI  XO31(824)
#define NEG    XO31(104)

#define LBZX   XO31( 87)
#define LHZX   XO31(276)
#define LHAX   XO31(343)
#define LWZX   XO31( 23)
#define STBX   XO31(215)
#define STHX   XO31(407)
#define STWX   XO31(151)

#define SPR(a,b) ((((a)<<5)|(b))<<11)
#define LR     SPR(8, 0)
#define CTR    SPR(9, 0)

#define SLW    XO31( 24)
#define SRW    XO31(536)
#define SRAW   XO31(792)

#define LMW    OPCD(46)
#define STMW   OPCD(47)

#define TW     XO31(4)
#define TRAP   (TW | TO (31))

#define RT(r)  ((r)<<21)
#define RS(r)  ((r)<<21)
#define RA(r)  ((r)<<16)
#define RB(r)  ((r)<<11)
#define TO(t)  ((t)<<21)
#define SH(s)  ((s)<<11)
#define MB(b)  ((b)<<6)
#define ME(e)  ((e)<<1)
#define BO(o)  ((o)<<21)

#define LK    1

#define TAB(t,a,b) (RT(t) | RA(a) | RB(b))
#define SAB(s,a,b) (RS(s) | RA(a) | RB(b))

#define BF(n)    ((n)<<23)
#define BI(n, c) (((c)+((n)*4))<<16)
#define BT(n, c) (((c)+((n)*4))<<21)
#define BA(n, c) (((c)+((n)*4))<<16)
#define BB(n, c) (((c)+((n)*4))<<11)

#define BO_COND_TRUE  BO (12)
#define BO_COND_FALSE BO ( 4)
#define BO_ALWAYS     BO (20)
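
/* Conditional branches always test a single bit of CR field 7:
   tcg_out_cmp() below compares into cr7, picking cmp/cmpi for signed
   conditions and cmpl/cmpli for unsigned ones, so the LT/GT/EQ bit
   patterns in tcg_to_bc serve both flavours. */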

enum {
    CR_LT,
    CR_GT,
    CR_EQ,
    CR_SO
};

static const uint32_t tcg_to_bc[10] = {
    [TCG_COND_EQ]  = BC | BI (7, CR_EQ) | BO_COND_TRUE,
    [TCG_COND_NE]  = BC | BI (7, CR_EQ) | BO_COND_FALSE,
    [TCG_COND_LT]  = BC | BI (7, CR_LT) | BO_COND_TRUE,
    [TCG_COND_GE]  = BC | BI (7, CR_LT) | BO_COND_FALSE,
    [TCG_COND_LE]  = BC | BI (7, CR_GT) | BO_COND_FALSE,
    [TCG_COND_GT]  = BC | BI (7, CR_GT) | BO_COND_TRUE,
    [TCG_COND_LTU] = BC | BI (7, CR_LT) | BO_COND_TRUE,
    [TCG_COND_GEU] = BC | BI (7, CR_LT) | BO_COND_FALSE,
    [TCG_COND_LEU] = BC | BI (7, CR_GT) | BO_COND_FALSE,
    [TCG_COND_GTU] = BC | BI (7, CR_GT) | BO_COND_TRUE,
};

static void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out32 (s, OR | SAB (arg, ret, arg));
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         int ret, tcg_target_long arg)
{
    if (arg == (int16_t) arg)
        tcg_out32 (s, ADDI | RT (ret) | RA (0) | (arg & 0xffff));
    else {
        tcg_out32 (s, ADDIS | RT (ret) | RA (0) | ((arg >> 16) & 0xffff));
        if (arg & 0xffff)
            tcg_out32 (s, ORI | RS (ret) | RA (ret) | (arg & 0xffff));
    }
}

static void tcg_out_ldst (TCGContext *s, int ret, int addr,
                          int offset, int op1, int op2)
{
    if (offset == (int16_t) offset)
        tcg_out32 (s, op1 | RT (ret) | RA (addr) | (offset & 0xffff));
    else {
        tcg_out_movi (s, TCG_TYPE_I32, 0, offset);
        tcg_out32 (s, op2 | RT (ret) | RA (addr) | RB (0));
    }
}

static void tcg_out_b (TCGContext *s, int mask, tcg_target_long target)
{
    tcg_target_long disp;

    disp = target - (tcg_target_long) s->code_ptr;
    if ((disp << 6) >> 6 == disp)
        tcg_out32 (s, B | (disp & 0x3fffffc) | mask);
    else {
        tcg_out_movi (s, TCG_TYPE_I32, 0, (tcg_target_long) target);
        tcg_out32 (s, MTSPR | RS (0) | CTR);
        tcg_out32 (s, BCCTR | BO_ALWAYS | mask);
    }
}

#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif

static void tcg_out_qemu_ld (TCGContext *s, const TCGArg *args, int opc)
{
    int addr_reg, data_reg, data_reg2, r0, mem_index, s_bits, bswap;
#ifdef CONFIG_SOFTMMU
    int r1, r2;
    void *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3)
        data_reg2 = *args++;
    else
        data_reg2 = 0;
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;
    s_bits = opc & 3;
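
    /* Softmmu fast path: the first rlwinm turns the guest address into
       the byte offset of its TLB entry (page index modulo the TLB size,
       scaled by sizeof(CPUTLBEntry)); adding TCG_AREG0 (env) and doing
       an lwzu then loads the addr_read tag into r4 while leaving r3
       pointing at the entry.  The second rlwinm clears the page-offset
       bits of the address but keeps the low s_bits alignment bits, so an
       unaligned access compares unequal and also takes the slow path.
       The result of the compare (plus a second compare of the high
       address word for 64-bit guest addresses, folded in with crand)
       lands in cr7. */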

#ifdef CONFIG_SOFTMMU
    r0 = 3;
    r1 = 4;
    r2 = 0;

    tcg_out32 (s, (RLWINM
                   | RA (r0)
                   | RS (addr_reg)
                   | SH (32 - (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS))
                   | MB (32 - (CPU_TLB_BITS + CPU_TLB_ENTRY_BITS))
                   | ME (31 - CPU_TLB_ENTRY_BITS)
                   )
        );
    tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (TCG_AREG0));
    tcg_out32 (s, (LWZU
                   | RT (r1)
                   | RA (r0)
                   | offsetof (CPUState, tlb_table[mem_index][0].addr_read)
                   )
        );
    tcg_out32 (s, (RLWINM
                   | RA (r2)
                   | RS (addr_reg)
                   | SH (0)
                   | MB ((32 - s_bits) & 31)
                   | ME (31 - TARGET_PAGE_BITS)
                   )
        );

    tcg_out32 (s, CMP | BF (7) | RA (r2) | RB (r1));
#if TARGET_LONG_BITS == 64
    tcg_out32 (s, LWZ | RT (r1) | RA (r0) | 4);
    tcg_out32 (s, CMP | BF (6) | RA (addr_reg2) | RB (r1));
    tcg_out32 (s, CRAND | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
#endif

    label1_ptr = s->code_ptr;
#ifdef FAST_PATH
    tcg_out32 (s, BC | BI (7, CR_EQ) | BO_COND_TRUE);
#endif
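
    /* Slow path: call the __ld*_mmu helper with the guest address in
       r3 (r3:r4 for 64-bit guest addresses) and mem_index in the next
       argument register.  The helper returns the value in r3 (r3:r4 for
       64-bit loads); the moves below sign-extend the 8/16-bit signed
       variants and shuffle the result into data_reg/data_reg2 without
       clobbering either, using r0 as scratch for the full swap. */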

    /* slow path */
#if TARGET_LONG_BITS == 32
    tcg_out_mov (s, 3, addr_reg);
    tcg_out_movi (s, TCG_TYPE_I32, 4, mem_index);
#else
    tcg_out_mov (s, 3, addr_reg2);
    tcg_out_mov (s, 4, addr_reg);
    tcg_out_movi (s, TCG_TYPE_I32, 5, mem_index);
#endif

    tcg_out_b (s, LK, (tcg_target_long) qemu_ld_helpers[s_bits]);
    switch (opc) {
    case 0|4:
        tcg_out32 (s, EXTSB | RA (data_reg) | RS (3));
        break;
    case 1|4:
        tcg_out32 (s, EXTSH | RA (data_reg) | RS (3));
        break;
    case 0:
    case 1:
    case 2:
        if (data_reg != 3)
            tcg_out_mov (s, data_reg, 3);
        break;
    case 3:
        if (data_reg == 3) {
            if (data_reg2 == 4) {
                tcg_out_mov (s, 0, 4);
                tcg_out_mov (s, 4, 3);
                tcg_out_mov (s, 3, 0);
            }
            else {
                tcg_out_mov (s, data_reg2, 3);
                tcg_out_mov (s, 3, 4);
            }
        }
        else {
            if (data_reg != 4) tcg_out_mov (s, data_reg, 4);
            if (data_reg2 != 3) tcg_out_mov (s, data_reg2, 3);
        }
        break;
    }

    label2_ptr = s->code_ptr;
    tcg_out32 (s, B);

    /* label1: fast path */
#ifdef FAST_PATH
    reloc_pc14 (label1_ptr, (tcg_target_long) s->code_ptr);
#endif

    /* r0 now contains &env->tlb_table[mem_index][index].addr_read */
    tcg_out32 (s, (LWZ
                   | RT (r0)
                   | RA (r0)
                   | (ADDEND_OFFSET + offsetof (CPUTLBEntry, addend)
                      - offsetof (CPUTLBEntry, addr_read))
                   ));
    /* r0 = env->tlb_table[mem_index][index].addend */
    tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (addr_reg));
    /* r0 = env->tlb_table[mem_index][index].addend + addr */

#else  /* !CONFIG_SOFTMMU */
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 0;
#else
    bswap = 1;
#endif
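
    /* bswap is set when the guest is little-endian (the host is
       big-endian PPC): those cases use lhbrx/lwbrx, which byte-reverse
       while loading, and 64-bit values are assembled from two reversed
       words so that data_reg receives the guest's low word and data_reg2
       its high word. */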

    switch (opc) {
    default:
    case 0:
        tcg_out32 (s, LBZ | RT (data_reg) | RA (r0));
        break;
    case 0|4:
        tcg_out32 (s, LBZ | RT (data_reg) | RA (r0));
        tcg_out32 (s, EXTSB | RA (data_reg) | RS (data_reg));
        break;
    case 1:
        if (bswap) tcg_out32 (s, LHBRX | RT (data_reg) | RB (r0));
        else tcg_out32 (s, LHZ | RT (data_reg) | RA (r0));
        break;
    case 1|4:
        if (bswap) {
            tcg_out32 (s, LHBRX | RT (data_reg) | RB (r0));
            tcg_out32 (s, EXTSH | RA (data_reg) | RS (data_reg));
        }
        else tcg_out32 (s, LHA | RT (data_reg) | RA (r0));
        break;
    case 2:
        if (bswap) tcg_out32 (s, LWBRX | RT (data_reg) | RB (r0));
        else tcg_out32 (s, LWZ | RT (data_reg) | RA (r0));
        break;
    case 3:
        if (bswap) {
            if (r0 == data_reg) {
                tcg_out32 (s, LWBRX | RT (0) | RB (r0));
                tcg_out32 (s, ADDI | RT (r0) | RA (r0) | 4);
                tcg_out32 (s, LWBRX | RT (data_reg2) | RB (r0));
                tcg_out_mov (s, data_reg, 0);
            }
            else {
                tcg_out32 (s, LWBRX | RT (data_reg) | RB (r0));
                tcg_out32 (s, ADDI | RT (r0) | RA (r0) | 4);
                tcg_out32 (s, LWBRX | RT (data_reg2) | RB (r0));
            }
        }
        else {
            if (r0 == data_reg2) {
                tcg_out32 (s, LWZ | RT (0) | RA (r0));
                tcg_out32 (s, LWZ | RT (data_reg) | RA (r0) | 4);
                tcg_out_mov (s, data_reg2, 0);
            }
            else {
                tcg_out32 (s, LWZ | RT (data_reg2) | RA (r0));
                tcg_out32 (s, LWZ | RT (data_reg) | RA (r0) | 4);
            }
        }
        break;
    }

#ifdef CONFIG_SOFTMMU
    reloc_pc24 (label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}

static void tcg_out_qemu_st (TCGContext *s, const TCGArg *args, int opc)
{
    int addr_reg, r0, r1, data_reg, data_reg2, mem_index, bswap;
#ifdef CONFIG_SOFTMMU
    int r2, ir;
    void *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3)
        data_reg2 = *args++;
    else
        data_reg2 = 0;
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;

#ifdef CONFIG_SOFTMMU
    r0 = 3;
    r1 = 4;
    r2 = 0;

    tcg_out32 (s, (RLWINM
                   | RA (r0)
                   | RS (addr_reg)
                   | SH (32 - (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS))
                   | MB (32 - (CPU_TLB_ENTRY_BITS + CPU_TLB_BITS))
                   | ME (31 - CPU_TLB_ENTRY_BITS)
                   )
        );
    tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (TCG_AREG0));
    tcg_out32 (s, (LWZU
                   | RT (r1)
                   | RA (r0)
                   | offsetof (CPUState, tlb_table[mem_index][0].addr_write)
                   )
        );
    tcg_out32 (s, (RLWINM
                   | RA (r2)
                   | RS (addr_reg)
                   | SH (0)
                   | MB ((32 - opc) & 31)
                   | ME (31 - TARGET_PAGE_BITS)
                   )
        );

    tcg_out32 (s, CMP | (7 << 23) | RA (r2) | RB (r1));
#if TARGET_LONG_BITS == 64
    tcg_out32 (s, LWZ | RT (r1) | RA (r0) | 4);
    tcg_out32 (s, CMP | BF (6) | RA (addr_reg2) | RB (r1));
    tcg_out32 (s, CRAND | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
#endif

    label1_ptr = s->code_ptr;
#ifdef FAST_PATH
    tcg_out32 (s, BC | BI (7, CR_EQ) | BO_COND_TRUE);
#endif
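
    /* Slow path: marshal the call to the __st*_mmu helper.  The guest
       address goes in r3 (r3:r4 for 64-bit guest addresses), then the
       data value, zero-extended for the 8/16-bit cases, in the next free
       argument register (ir), then mem_index.  With
       TCG_TARGET_CALL_ALIGN_ARGS defined, an extra register is skipped,
       presumably so 64-bit values start on the register pair the ABI
       expects. */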

    /* slow path */
#if TARGET_LONG_BITS == 32
    tcg_out_mov (s, 3, addr_reg);
    ir = 4;
#else
    tcg_out_mov (s, 3, addr_reg2);
    tcg_out_mov (s, 4, addr_reg);
#ifdef TCG_TARGET_CALL_ALIGN_ARGS
    ir = 5;
#else
    ir = 4;
#endif
#endif

    switch (opc) {
    case 0:
        tcg_out32 (s, (RLWINM
                       | RA (ir)
                       | RS (data_reg)
                       | SH (0)
                       | MB (24)
                       | ME (31)));
        break;
    case 1:
        tcg_out32 (s, (RLWINM
                       | RA (ir)
                       | RS (data_reg)
                       | SH (0)
                       | MB (16)
                       | ME (31)));
        break;
    case 2:
        tcg_out_mov (s, ir, data_reg);
        break;
    case 3:
#ifdef TCG_TARGET_CALL_ALIGN_ARGS
        ir = 5;
#endif
        tcg_out_mov (s, ir++, data_reg2);
        tcg_out_mov (s, ir, data_reg);
        break;
    }
    ir++;

    tcg_out_movi (s, TCG_TYPE_I32, ir, mem_index);
    tcg_out_b (s, LK, (tcg_target_long) qemu_st_helpers[opc]);
    label2_ptr = s->code_ptr;
    tcg_out32 (s, B);

    /* label1: fast path */
#ifdef FAST_PATH
    reloc_pc14 (label1_ptr, (tcg_target_long) s->code_ptr);
#endif

    tcg_out32 (s, (LWZ
                   | RT (r0)
                   | RA (r0)
                   | (ADDEND_OFFSET + offsetof (CPUTLBEntry, addend)
                      - offsetof (CPUTLBEntry, addr_write))
                   ));
    /* r0 = env->tlb_table[mem_index][index].addend */
    tcg_out32 (s, ADD | RT (r0) | RA (r0) | RB (addr_reg));
    /* r0 = env->tlb_table[mem_index][index].addend + addr */

#else  /* !CONFIG_SOFTMMU */
    r1 = 3;
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 0;
#else
    bswap = 1;
#endif

    switch (opc) {
    case 0:
        tcg_out32 (s, STB | RS (data_reg) | RA (r0));
        break;
    case 1:
        if (bswap) tcg_out32 (s, STHBRX | RS (data_reg) | RA (0) | RB (r0));
        else tcg_out32 (s, STH | RS (data_reg) | RA (r0));
        break;
    case 2:
        if (bswap) tcg_out32 (s, STWBRX | RS (data_reg) | RA (0) | RB (r0));
        else tcg_out32 (s, STW | RS (data_reg) | RA (r0));
        break;
    case 3:
        if (bswap) {
            tcg_out32 (s, ADDI | RT (r1) | RA (r0) | 4);
            tcg_out32 (s, STWBRX | RS (data_reg) | RA (0) | RB (r0));
            tcg_out32 (s, STWBRX | RS (data_reg2) | RA (0) | RB (r1));
        }
        else {
            tcg_out32 (s, STW | RS (data_reg2) | RA (r0));
            tcg_out32 (s, STW | RS (data_reg) | RA (r0) | 4);
        }
        break;
    }

#ifdef CONFIG_SOFTMMU
    reloc_pc24 (label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}
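
/* Prologue/epilogue: the frame is [linkage area | TCG static call-arg
   area | callee-saved registers], rounded up to 16 bytes.  The prologue
   saves LR into the caller's frame at BACK_CHAIN_OFFSET, saves the
   callee-saved registers, then jumps through CTR to the translated code
   whose address the caller passed in r3.  tb_ret_addr marks the point
   INDEX_op_exit_tb branches back to; from there the registers and LR
   are restored and the stack pointer is popped. */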

void tcg_target_qemu_prologue (TCGContext *s)
{
    int i, frame_size;

    frame_size = 0
        + LINKAGE_AREA_SIZE
        + TCG_STATIC_CALL_ARGS_SIZE
        + ARRAY_SIZE (tcg_target_callee_save_regs) * 4
        ;
    frame_size = (frame_size + 15) & ~15;

    tcg_out32 (s, MFSPR | RT (0) | LR);
    tcg_out32 (s, STWU | RS (1) | RA (1) | (-frame_size & 0xffff));
    for (i = 0; i < ARRAY_SIZE (tcg_target_callee_save_regs); ++i)
        tcg_out32 (s, (STW
                       | RS (tcg_target_callee_save_regs[i])
                       | RA (1)
                       | (i * 4 + LINKAGE_AREA_SIZE + TCG_STATIC_CALL_ARGS_SIZE)
                       )
            );
    tcg_out32 (s, STW | RS (0) | RA (1) | (frame_size + BACK_CHAIN_OFFSET));

    tcg_out32 (s, MTSPR | RS (3) | CTR);
    tcg_out32 (s, BCCTR | BO_ALWAYS);
    tb_ret_addr = s->code_ptr;

    for (i = 0; i < ARRAY_SIZE (tcg_target_callee_save_regs); ++i)
        tcg_out32 (s, (LWZ
                       | RT (tcg_target_callee_save_regs[i])
                       | RA (1)
                       | (i * 4 + LINKAGE_AREA_SIZE + TCG_STATIC_CALL_ARGS_SIZE)
                       )
            );
    tcg_out32 (s, LWZ | RT (0) | RA (1) | (frame_size + BACK_CHAIN_OFFSET));
    tcg_out32 (s, MTSPR | RS (0) | LR);
    tcg_out32 (s, ADDI | RT (1) | RA (1) | frame_size);
    tcg_out32 (s, BCLR | BO_ALWAYS);
}

static void tcg_out_ld (TCGContext *s, TCGType type, int ret, int arg1,
                        tcg_target_long arg2)
{
    tcg_out_ldst (s, ret, arg1, arg2, LWZ, LWZX);
}

static void tcg_out_st (TCGContext *s, TCGType type, int arg, int arg1,
                        tcg_target_long arg2)
{
    tcg_out_ldst (s, arg, arg1, arg2, STW, STWX);
}
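
/* addi sign-extends its 16-bit immediate, so when a 32-bit constant is
   split into addis + addi the high half must be incremented whenever bit
   15 of the low half is set; that is what the "+ ((uint16_t) si >> 15)"
   below does.  For example, si = 0x12348000 gives h = 0x1234 + 1 =
   0x1235: addis adds 0x12350000 and the final addi adds -0x8000,
   yielding 0x12348000 as required. */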

static void ppc_addi (TCGContext *s, int rt, int ra, tcg_target_long si)
{
    if (!si && rt == ra)
        return;

    if (si == (int16_t) si)
        tcg_out32 (s, ADDI | RT (rt) | RA (ra) | (si & 0xffff));
    else {
        uint16_t h = ((si >> 16) & 0xffff) + ((uint16_t) si >> 15);
        tcg_out32 (s, ADDIS | RT (rt) | RA (ra) | h);
        tcg_out32 (s, ADDI | RT (rt) | RA (rt) | (si & 0xffff));
    }
}

static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    ppc_addi (s, reg, reg, val);
}

static void tcg_out_cmp (TCGContext *s, int cond, TCGArg arg1, TCGArg arg2,
                         int const_arg2, int cr)
{
    int imm;
    uint32_t op;

    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        if (const_arg2) {
            if ((int16_t) arg2 == arg2) {
                op = CMPI;
                imm = 1;
                break;
            }
            else if ((uint16_t) arg2 == arg2) {
                op = CMPLI;
                imm = 1;
                break;
            }
        }
        op = CMPL;
        imm = 0;
        break;

    case TCG_COND_LT:
    case TCG_COND_GE:
    case TCG_COND_LE:
    case TCG_COND_GT:
        if (const_arg2) {
            if ((int16_t) arg2 == arg2) {
                op = CMPI;
                imm = 1;
                break;
            }
        }
        op = CMP;
        imm = 0;
        break;

    case TCG_COND_LTU:
    case TCG_COND_GEU:
    case TCG_COND_LEU:
    case TCG_COND_GTU:
        if (const_arg2) {
            if ((uint16_t) arg2 == arg2) {
                op = CMPLI;
                imm = 1;
                break;
            }
        }
        op = CMPL;
        imm = 0;
        break;

    default:
        tcg_abort ();
    }
    op |= BF (cr);

    if (imm)
        tcg_out32 (s, op | RA (arg1) | (arg2 & 0xffff));
    else {
        if (const_arg2) {
            tcg_out_movi (s, TCG_TYPE_I32, 0, arg2);
            tcg_out32 (s, op | RA (arg1) | RB (0));
        }
        else
            tcg_out32 (s, op | RA (arg1) | RB (arg2));
    }
}

static void tcg_out_bc (TCGContext *s, int bc, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value)
        tcg_out32 (s, bc | reloc_pc14_val (s->code_ptr, l->u.value));
    else {
        uint16_t val = *(uint16_t *) &s->code_ptr[2];

        /* Thanks to Andrzej Zaborowski */
        tcg_out32 (s, bc | (val & 0xfffc));
        tcg_out_reloc (s, s->code_ptr - 4, R_PPC_REL14, label_index, 0);
    }
}

static void tcg_out_brcond (TCGContext *s, int cond,
                            TCGArg arg1, TCGArg arg2, int const_arg2,
                            int label_index)
{
    tcg_out_cmp (s, cond, arg1, arg2, const_arg2, 7);
    tcg_out_bc (s, tcg_to_bc[cond], label_index);
}
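
/* Double-word compare-and-branch: for the ordered conditions the high
   words are compared twice, once with the strict form of the condition
   (into cr5) and once for equality (into cr6), while the low words are
   compared into cr7.  The crandc/crand and cror below then leave
   "high strictly lower/greater OR (high equal AND low condition)" in
   cr7[eq], which the final bc tests. */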

/* XXX: we implement it at the target level to avoid having to
   handle cross-basic-block temporaries */
static void tcg_out_brcond2 (TCGContext *s, const TCGArg *args,
                             const int *const_args)
{
    int cond = args[4], label_index = args[5], op;
    struct { int bit1; int bit2; int cond2; } bits[] = {
        [TCG_COND_LT ] = { CR_LT, CR_LT, TCG_COND_LT  },
        [TCG_COND_LE ] = { CR_LT, CR_GT, TCG_COND_LT  },
        [TCG_COND_GT ] = { CR_GT, CR_GT, TCG_COND_GT  },
        [TCG_COND_GE ] = { CR_GT, CR_LT, TCG_COND_GT  },
        [TCG_COND_LTU] = { CR_LT, CR_LT, TCG_COND_LTU },
        [TCG_COND_LEU] = { CR_LT, CR_GT, TCG_COND_LTU },
        [TCG_COND_GTU] = { CR_GT, CR_GT, TCG_COND_GTU },
        [TCG_COND_GEU] = { CR_GT, CR_LT, TCG_COND_GTU },
    }, *b = &bits[cond];

    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        op = (cond == TCG_COND_EQ) ? CRAND : CRNAND;
        tcg_out_cmp (s, cond, args[0], args[2], const_args[2], 6);
        tcg_out_cmp (s, cond, args[1], args[3], const_args[3], 7);
        tcg_out32 (s, op | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
        break;
    case TCG_COND_LT:
    case TCG_COND_LE:
    case TCG_COND_GT:
    case TCG_COND_GE:
    case TCG_COND_LTU:
    case TCG_COND_LEU:
    case TCG_COND_GTU:
    case TCG_COND_GEU:
        op = (b->bit1 != b->bit2) ? CRANDC : CRAND;
        tcg_out_cmp (s, b->cond2, args[1], args[3], const_args[3], 5);
        tcg_out_cmp (s, TCG_COND_EQ, args[1], args[3], const_args[3], 6);
        tcg_out_cmp (s, cond, args[0], args[2], const_args[2], 7);
        tcg_out32 (s, op | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, b->bit2));
        tcg_out32 (s, CROR | BT (7, CR_EQ) | BA (5, b->bit1) | BB (7, CR_EQ));
        break;
    default:
        tcg_abort();
    }

    tcg_out_bc (s, (BC | BI (7, CR_EQ) | BO_COND_TRUE), label_index);
}
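
/* TB chaining: INDEX_op_goto_tb (in tcg_out_op below) reserves a 16-byte
   slot; this routine later rewrites that slot with a single relative
   "b", with a lis/ori/mtctr/bctr sequence when the destination is
   outside the +/-32 MB branch range, or with nops when the destination
   is simply the instruction following the slot (disp == 16), and then
   flushes the icache over whatever was patched. */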

void ppc_tb_set_jmp_target (unsigned long jmp_addr, unsigned long addr)
{
    uint32_t *ptr;
    long disp = addr - jmp_addr;
    unsigned long patch_size;

    ptr = (uint32_t *)jmp_addr;

    if ((disp << 6) >> 6 != disp) {
        ptr[0] = 0x3c000000 | (addr >> 16);    /* lis 0,addr@ha */
        ptr[1] = 0x60000000 | (addr & 0xffff); /* la  0,addr@l(0) */
        ptr[2] = 0x7c0903a6;                   /* mtctr 0 */
        ptr[3] = 0x4e800420;                   /* brctr */
        patch_size = 16;
    } else {
        /* patch the branch destination */
        if (disp != 16) {
            *ptr = 0x48000000 | (disp & 0x03fffffc); /* b disp */
            patch_size = 4;
        } else {
            ptr[0] = 0x60000000; /* nop */
            ptr[1] = 0x60000000;
            ptr[2] = 0x60000000;
            ptr[3] = 0x60000000;
            patch_size = 16;
        }
    }
    /* flush icache */
    flush_icache_range(jmp_addr, jmp_addr + patch_size);
}

static void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                       const int *const_args)
{
    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi (s, TCG_TYPE_I32, TCG_REG_R3, args[0]);
        tcg_out_b (s, 0, (tcg_target_long) tb_ret_addr);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            s->code_ptr += 16;
        }
        else {
            tcg_abort ();
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_br:
        {
            TCGLabel *l = &s->labels[args[0]];

            if (l->has_value) {
                tcg_out_b (s, 0, l->u.value);
            }
            else {
                uint32_t val = *(uint32_t *) s->code_ptr;

                /* Thanks to Andrzej Zaborowski */
                tcg_out32 (s, B | (val & 0x3fffffc));
                tcg_out_reloc (s, s->code_ptr - 4, R_PPC_REL24, args[0], 0);
            }
        }
        break;
    case INDEX_op_call:
        if (const_args[0]) {
            tcg_out_b (s, LK, args[0]);
        }
        else {
            tcg_out32 (s, MTSPR | RS (args[0]) | LR);
            tcg_out32 (s, BCLR | BO_ALWAYS | LK);
        }
        break;
    case INDEX_op_jmp:
        if (const_args[0]) {
            tcg_out_b (s, 0, args[0]);
        }
        else {
            tcg_out32 (s, MTSPR | RS (args[0]) | CTR);
            tcg_out32 (s, BCCTR | BO_ALWAYS);
        }
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
        break;
    case INDEX_op_ld8u_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX);
        break;
    case INDEX_op_ld8s_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX);
        tcg_out32 (s, EXTSB | RS (args[0]) | RA (args[0]));
        break;
    case INDEX_op_ld16u_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LHZ, LHZX);
        break;
    case INDEX_op_ld16s_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LHA, LHAX);
        break;
    case INDEX_op_ld_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], LWZ, LWZX);
        break;
    case INDEX_op_st8_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], STB, STBX);
        break;
    case INDEX_op_st16_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], STH, STHX);
        break;
    case INDEX_op_st_i32:
        tcg_out_ldst (s, args[0], args[1], args[2], STW, STWX);
        break;

    case INDEX_op_add_i32:
        if (const_args[2])
            ppc_addi (s, args[0], args[1], args[2]);
        else
            tcg_out32 (s, ADD | TAB (args[0], args[1], args[2]));
        break;
    case INDEX_op_sub_i32:
        if (const_args[2])
            ppc_addi (s, args[0], args[1], -args[2]);
        else
            tcg_out32 (s, SUBF | TAB (args[0], args[2], args[1]));
        break;

    case INDEX_op_and_i32:
        if (const_args[2]) {
            if ((args[2] & 0xffff) == args[2])
                tcg_out32 (s, ANDI | RS (args[1]) | RA (args[0]) | args[2]);
            else if ((args[2] & 0xffff0000) == args[2])
                tcg_out32 (s, ANDIS | RS (args[1]) | RA (args[0])
                           | ((args[2] >> 16) & 0xffff));
            else {
                tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
                tcg_out32 (s, AND | SAB (args[1], args[0], 0));
            }
        }
        else
            tcg_out32 (s, AND | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_or_i32:
        if (const_args[2]) {
            if (args[2] & 0xffff) {
                tcg_out32 (s, ORI | RS (args[1]) | RA (args[0])
                           | (args[2] & 0xffff));
                if (args[2] >> 16)
                    tcg_out32 (s, ORIS | RS (args[0]) | RA (args[0])
                               | ((args[2] >> 16) & 0xffff));
            }
            else {
                tcg_out32 (s, ORIS | RS (args[1]) | RA (args[0])
                           | ((args[2] >> 16) & 0xffff));
            }
        }
        else
            tcg_out32 (s, OR | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_xor_i32:
        if (const_args[2]) {
            if ((args[2] & 0xffff) == args[2])
                tcg_out32 (s, XORI | RS (args[1]) | RA (args[0])
                           | (args[2] & 0xffff));
            else if ((args[2] & 0xffff0000) == args[2])
                tcg_out32 (s, XORIS | RS (args[1]) | RA (args[0])
                           | ((args[2] >> 16) & 0xffff));
            else {
                tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
                tcg_out32 (s, XOR | SAB (args[1], args[0], 0));
            }
        }
        else
            tcg_out32 (s, XOR | SAB (args[1], args[0], args[2]));
        break;

    case INDEX_op_mul_i32:
        if (const_args[2]) {
            if (args[2] == (int16_t) args[2])
                tcg_out32 (s, MULLI | RT (args[0]) | RA (args[1])
                           | (args[2] & 0xffff));
            else {
                tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
                tcg_out32 (s, MULLW | TAB (args[0], args[1], 0));
            }
        }
        else
            tcg_out32 (s, MULLW | TAB (args[0], args[1], args[2]));
        break;
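
    /* 32-bit PPC has no remainder instruction, so rem/remu below compute
       args[1] - (args[1] / args[2]) * args[2], with r0 holding the
       intermediate quotient and product. */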

    case INDEX_op_div_i32:
        tcg_out32 (s, DIVW | TAB (args[0], args[1], args[2]));
        break;

    case INDEX_op_divu_i32:
        tcg_out32 (s, DIVWU | TAB (args[0], args[1], args[2]));
        break;

    case INDEX_op_rem_i32:
        tcg_out32 (s, DIVW | TAB (0, args[1], args[2]));
        tcg_out32 (s, MULLW | TAB (0, 0, args[2]));
        tcg_out32 (s, SUBF | TAB (args[0], 0, args[1]));
        break;

    case INDEX_op_remu_i32:
        tcg_out32 (s, DIVWU | TAB (0, args[1], args[2]));
        tcg_out32 (s, MULLW | TAB (0, 0, args[2]));
        tcg_out32 (s, SUBF | TAB (args[0], 0, args[1]));
        break;

    case INDEX_op_mulu2_i32:
        if (args[0] == args[2] || args[0] == args[3]) {
            tcg_out32 (s, MULLW | TAB (0, args[2], args[3]));
            tcg_out32 (s, MULHWU | TAB (args[1], args[2], args[3]));
            tcg_out_mov (s, args[0], 0);
        }
        else {
            tcg_out32 (s, MULLW | TAB (args[0], args[2], args[3]));
            tcg_out32 (s, MULHWU | TAB (args[1], args[2], args[3]));
        }
        break;

    case INDEX_op_shl_i32:
        if (const_args[2]) {
            tcg_out32 (s, (RLWINM
                           | RA (args[0])
                           | RS (args[1])
                           | SH (args[2])
                           | MB (0)
                           | ME (31 - args[2])
                           )
                );
        }
        else
            tcg_out32 (s, SLW | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_shr_i32:
        if (const_args[2]) {
            tcg_out32 (s, (RLWINM
                           | RA (args[0])
                           | RS (args[1])
                           | SH (32 - args[2])
                           | MB (args[2])
                           | ME (31)
                           )
                );
        }
        else
            tcg_out32 (s, SRW | SAB (args[1], args[0], args[2]));
        break;
    case INDEX_op_sar_i32:
        if (const_args[2])
            tcg_out32 (s, SRAWI | RS (args[1]) | RA (args[0]) | SH (args[2]));
        else
            tcg_out32 (s, SRAW | SAB (args[1], args[0], args[2]));
        break;
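
    /* 64-bit add/sub are synthesised from the carry-propagating pairs
       addc/adde and subfc/subfe; when the low result register aliases
       one of the high inputs, the low half is computed into r0 first and
       moved into place afterwards. */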

    case INDEX_op_add2_i32:
        if (args[0] == args[3] || args[0] == args[5]) {
            tcg_out32 (s, ADDC | TAB (0, args[2], args[4]));
            tcg_out32 (s, ADDE | TAB (args[1], args[3], args[5]));
            tcg_out_mov (s, args[0], 0);
        }
        else {
            tcg_out32 (s, ADDC | TAB (args[0], args[2], args[4]));
            tcg_out32 (s, ADDE | TAB (args[1], args[3], args[5]));
        }
        break;
    case INDEX_op_sub2_i32:
        if (args[0] == args[3] || args[0] == args[5]) {
            tcg_out32 (s, SUBFC | TAB (0, args[4], args[2]));
            tcg_out32 (s, SUBFE | TAB (args[1], args[5], args[3]));
            tcg_out_mov (s, args[0], 0);
        }
        else {
            tcg_out32 (s, SUBFC | TAB (args[0], args[4], args[2]));
            tcg_out32 (s, SUBFE | TAB (args[1], args[5], args[3]));
        }
        break;

    case INDEX_op_brcond_i32:
        /*
          args[0] = r0
          args[1] = r1
          args[2] = cond
          args[3] = label_index
          const_args[1] says whether r1 is a constant
        */
        tcg_out_brcond (s, args[2], args[0], args[1], const_args[1], args[3]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args, const_args);
        break;

    case INDEX_op_neg_i32:
        tcg_out32 (s, NEG | RT (args[0]) | RA (args[1]));
        break;

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    case INDEX_op_ext8s_i32:
        tcg_out32 (s, EXTSB | RS (args[1]) | RA (args[0]));
        break;
    case INDEX_op_ext16s_i32:
        tcg_out32 (s, EXTSH | RS (args[1]) | RA (args[0]));
        break;

    default:
        tcg_dump_ops (s, stderr);
        tcg_abort ();
    }
}

static const TCGTargetOpDef ppc_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "r", "ri" } },
    { INDEX_op_mul_i32, { "r", "r", "ri" } },
    { INDEX_op_div_i32, { "r", "r", "r" } },
    { INDEX_op_divu_i32, { "r", "r", "r" } },
    { INDEX_op_rem_i32, { "r", "r", "r" } },
    { INDEX_op_remu_i32, { "r", "r", "r" } },
    { INDEX_op_mulu2_i32, { "r", "r", "r", "r" } },
    { INDEX_op_sub_i32, { "r", "r", "ri" } },
    { INDEX_op_and_i32, { "r", "r", "ri" } },
    { INDEX_op_or_i32, { "r", "r", "ri" } },
    { INDEX_op_xor_i32, { "r", "r", "ri" } },

    { INDEX_op_shl_i32, { "r", "r", "ri" } },
    { INDEX_op_shr_i32, { "r", "r", "ri" } },
    { INDEX_op_sar_i32, { "r", "r", "ri" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_add2_i32, { "r", "r", "r", "r", "r", "r" } },
    { INDEX_op_sub2_i32, { "r", "r", "r", "r", "r", "r" } },
    { INDEX_op_brcond2_i32, { "r", "r", "r", "r" } },

    { INDEX_op_neg_i32, { "r", "r" } },

#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L" } },

    { INDEX_op_qemu_st8, { "K", "K" } },
    { INDEX_op_qemu_st16, { "K", "K" } },
    { INDEX_op_qemu_st32, { "K", "K" } },
    { INDEX_op_qemu_st64, { "M", "M", "M" } },
#else
    { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L", "L", "L" } },

    { INDEX_op_qemu_st8, { "K", "K", "K" } },
    { INDEX_op_qemu_st16, { "K", "K", "K" } },
    { INDEX_op_qemu_st32, { "K", "K", "K" } },
    { INDEX_op_qemu_st64, { "M", "M", "M", "M" } },
#endif

    { INDEX_op_ext8s_i32, { "r", "r" } },
    { INDEX_op_ext16s_i32, { "r", "r" } },

    { -1 },
};
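
/* r0 is used throughout this backend as an internal scratch register and
   r1 is the stack pointer, so both are kept away from the register
   allocator; r2 is additionally reserved on non-Darwin systems, where
   the ABI reserves it for other uses. */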

void tcg_target_init(TCGContext *s)
{
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_R0) |
#ifdef __APPLE__
                     (1 << TCG_REG_R2) |
#endif
                     (1 << TCG_REG_R3) |
                     (1 << TCG_REG_R4) |
                     (1 << TCG_REG_R5) |
                     (1 << TCG_REG_R6) |
                     (1 << TCG_REG_R7) |
                     (1 << TCG_REG_R8) |
                     (1 << TCG_REG_R9) |
                     (1 << TCG_REG_R10) |
                     (1 << TCG_REG_R11) |
                     (1 << TCG_REG_R12)
        );

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);
#ifndef __APPLE__
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2);
#endif

    tcg_add_target_add_op_defs(ppc_op_defs);
}