tcg/ppc/tcg-target.c
1 /*
2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008 Fabrice Bellard
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
25 #include "tcg-be-ldst.h"
27 static uint8_t *tb_ret_addr;
29 #if defined _CALL_DARWIN || defined __APPLE__
30 #define TCG_TARGET_CALL_DARWIN
31 #endif
33 #ifdef TCG_TARGET_CALL_DARWIN
34 #define LINKAGE_AREA_SIZE 24
35 #define LR_OFFSET 8
36 #elif defined _CALL_AIX
37 #define LINKAGE_AREA_SIZE 52
38 #define LR_OFFSET 8
39 #else
40 #define LINKAGE_AREA_SIZE 8
41 #define LR_OFFSET 4
42 #endif
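/* Host ABI stack frame parameters (a descriptive note, inferred from how the
   prologue below uses them): LINKAGE_AREA_SIZE is the size of the ABI-defined
   frame header at the lowest addresses of our frame, above which the helper
   argument area and callee-save area are laid out in tcg_target_qemu_prologue;
   LR_OFFSET is where, relative to the caller's stack pointer, the saved link
   register lives. */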
44 #ifndef GUEST_BASE
45 #define GUEST_BASE 0
46 #endif
48 #ifdef CONFIG_USE_GUEST_BASE
49 #define TCG_GUEST_BASE_REG 30
50 #else
51 #define TCG_GUEST_BASE_REG 0
52 #endif
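/* With CONFIG_USE_GUEST_BASE a non-zero guest base is kept in r30 (loaded and
   reserved in the prologue) so that qemu_ld/st can use it directly as the base
   of an indexed access; otherwise "no base" is expressed as register 0, which
   reads as literal zero in the RA slot of the X-form loads and stores used
   below. */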
54 #ifndef NDEBUG
55 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
56 "r0",
57 "r1",
58 "r2",
59 "r3",
60 "r4",
61 "r5",
62 "r6",
63 "r7",
64 "r8",
65 "r9",
66 "r10",
67 "r11",
68 "r12",
69 "r13",
70 "r14",
71 "r15",
72 "r16",
73 "r17",
74 "r18",
75 "r19",
76 "r20",
77 "r21",
78 "r22",
79 "r23",
80 "r24",
81 "r25",
82 "r26",
83 "r27",
84 "r28",
85 "r29",
86 "r30",
87 "r31"
89 #endif
91 static const int tcg_target_reg_alloc_order[] = {
92 TCG_REG_R14,
93 TCG_REG_R15,
94 TCG_REG_R16,
95 TCG_REG_R17,
96 TCG_REG_R18,
97 TCG_REG_R19,
98 TCG_REG_R20,
99 TCG_REG_R21,
100 TCG_REG_R22,
101 TCG_REG_R23,
102 TCG_REG_R28,
103 TCG_REG_R29,
104 TCG_REG_R30,
105 TCG_REG_R31,
106 #ifdef TCG_TARGET_CALL_DARWIN
107 TCG_REG_R2,
108 #endif
109 TCG_REG_R3,
110 TCG_REG_R4,
111 TCG_REG_R5,
112 TCG_REG_R6,
113 TCG_REG_R7,
114 TCG_REG_R8,
115 TCG_REG_R9,
116 TCG_REG_R10,
117 #ifndef TCG_TARGET_CALL_DARWIN
118 TCG_REG_R11,
119 #endif
120 TCG_REG_R12,
121 #ifndef _CALL_SYSV
122 TCG_REG_R13,
123 #endif
124 TCG_REG_R24,
125 TCG_REG_R25,
126 TCG_REG_R26,
127 TCG_REG_R27
130 static const int tcg_target_call_iarg_regs[] = {
131 TCG_REG_R3,
132 TCG_REG_R4,
133 TCG_REG_R5,
134 TCG_REG_R6,
135 TCG_REG_R7,
136 TCG_REG_R8,
137 TCG_REG_R9,
138 TCG_REG_R10
141 static const int tcg_target_call_oarg_regs[2] = {
142 TCG_REG_R3,
143 TCG_REG_R4
146 static const int tcg_target_callee_save_regs[] = {
147 #ifdef TCG_TARGET_CALL_DARWIN
148 TCG_REG_R11,
149 TCG_REG_R13,
150 #endif
151 #ifdef _CALL_AIX
152 TCG_REG_R13,
153 #endif
154 TCG_REG_R14,
155 TCG_REG_R15,
156 TCG_REG_R16,
157 TCG_REG_R17,
158 TCG_REG_R18,
159 TCG_REG_R19,
160 TCG_REG_R20,
161 TCG_REG_R21,
162 TCG_REG_R22,
163 TCG_REG_R23,
164 TCG_REG_R24,
165 TCG_REG_R25,
166 TCG_REG_R26,
167 TCG_REG_R27, /* currently used for the global env */
168 TCG_REG_R28,
169 TCG_REG_R29,
170 TCG_REG_R30,
171 TCG_REG_R31
174 static uint32_t reloc_pc24_val (void *pc, tcg_target_long target)
176 tcg_target_long disp;
178 disp = target - (tcg_target_long) pc;
179 if ((disp << 6) >> 6 != disp)
180 tcg_abort ();
182 return disp & 0x3fffffc;
185 static void reloc_pc24 (void *pc, tcg_target_long target)
187 *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3fffffc)
188 | reloc_pc24_val (pc, target);
191 static uint16_t reloc_pc14_val (void *pc, tcg_target_long target)
193 tcg_target_long disp;
195 disp = target - (tcg_target_long) pc;
196 if (disp != (int16_t) disp)
197 tcg_abort ();
199 return disp & 0xfffc;
202 static void reloc_pc14 (void *pc, tcg_target_long target)
204 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xfffc)
205 | reloc_pc14_val (pc, target);
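/* Branch relocations: R_PPC_REL24 patches the 26-bit LI field of an
   unconditional branch (word aligned, +/-32MB reach, hence the
   (disp << 6) >> 6 overflow check), and R_PPC_REL14 patches the 16-bit BD
   field of a conditional branch (+/-32KB).  Both displacements are relative
   to the branch instruction itself. */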
208 static void patch_reloc(uint8_t *code_ptr, int type,
209 intptr_t value, intptr_t addend)
211 value += addend;
212 switch (type) {
213 case R_PPC_REL14:
214 reloc_pc14 (code_ptr, value);
215 break;
216 case R_PPC_REL24:
217 reloc_pc24 (code_ptr, value);
218 break;
219 default:
220 tcg_abort();
224 /* parse target specific constraints */
225 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
227 const char *ct_str;
229 ct_str = *pct_str;
230 switch (ct_str[0]) {
231 case 'A': case 'B': case 'C': case 'D':
232 ct->ct |= TCG_CT_REG;
233 tcg_regset_set_reg(ct->u.regs, 3 + ct_str[0] - 'A');
234 break;
235 case 'r':
236 ct->ct |= TCG_CT_REG;
237 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
238 break;
239 #ifdef CONFIG_SOFTMMU
240 case 'L': /* qemu_ld constraint */
241 ct->ct |= TCG_CT_REG;
242 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
243 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
244 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
245 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
246 #if TARGET_LONG_BITS == 64
247 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R6);
248 #ifdef TCG_TARGET_CALL_ALIGN_ARGS
249 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R7);
250 #endif
251 #endif
252 break;
253 case 'K': /* qemu_st[8..32] constraint */
254 ct->ct |= TCG_CT_REG;
255 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
256 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
257 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
258 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
259 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R6);
260 #if TARGET_LONG_BITS == 64
261 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R7);
262 #ifdef TCG_TARGET_CALL_ALIGN_ARGS
263 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R8);
264 #endif
265 #endif
266 break;
267 case 'M': /* qemu_st64 constraint */
268 ct->ct |= TCG_CT_REG;
269 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
270 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
271 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
272 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
273 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R6);
274 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R7);
275 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R8);
276 #ifdef TCG_TARGET_CALL_ALIGN_ARGS
277 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R9);
278 #endif
279 break;
280 #else
281 case 'L':
282 case 'K':
283 ct->ct |= TCG_CT_REG;
284 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
285 break;
286 case 'M':
287 ct->ct |= TCG_CT_REG;
288 tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
289 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
290 break;
291 #endif
292 default:
293 return -1;
295 ct_str++;
296 *pct_str = ct_str;
297 return 0;
300 /* test if a constant matches the constraint */
301 static int tcg_target_const_match(tcg_target_long val,
302 const TCGArgConstraint *arg_ct)
304 int ct;
306 ct = arg_ct->ct;
307 if (ct & TCG_CT_CONST)
308 return 1;
309 return 0;
312 #define OPCD(opc) ((opc)<<26)
313 #define XO31(opc) (OPCD(31)|((opc)<<1))
314 #define XO19(opc) (OPCD(19)|((opc)<<1))
316 #define B OPCD(18)
317 #define BC OPCD(16)
318 #define LBZ OPCD(34)
319 #define LHZ OPCD(40)
320 #define LHA OPCD(42)
321 #define LWZ OPCD(32)
322 #define STB OPCD(38)
323 #define STH OPCD(44)
324 #define STW OPCD(36)
326 #define ADDIC OPCD(12)
327 #define ADDI OPCD(14)
328 #define ADDIS OPCD(15)
329 #define ORI OPCD(24)
330 #define ORIS OPCD(25)
331 #define XORI OPCD(26)
332 #define XORIS OPCD(27)
333 #define ANDI OPCD(28)
334 #define ANDIS OPCD(29)
335 #define MULLI OPCD( 7)
336 #define CMPLI OPCD(10)
337 #define CMPI OPCD(11)
338 #define SUBFIC OPCD( 8)
340 #define LWZU OPCD(33)
341 #define STWU OPCD(37)
343 #define RLWIMI OPCD(20)
344 #define RLWINM OPCD(21)
345 #define RLWNM OPCD(23)
347 #define BCLR XO19( 16)
348 #define BCCTR XO19(528)
349 #define CRAND XO19(257)
350 #define CRANDC XO19(129)
351 #define CRNAND XO19(225)
352 #define CROR XO19(449)
353 #define CRNOR XO19( 33)
355 #define EXTSB XO31(954)
356 #define EXTSH XO31(922)
357 #define ADD XO31(266)
358 #define ADDE XO31(138)
359 #define ADDC XO31( 10)
360 #define AND XO31( 28)
361 #define SUBF XO31( 40)
362 #define SUBFC XO31( 8)
363 #define SUBFE XO31(136)
364 #define OR XO31(444)
365 #define XOR XO31(316)
366 #define MULLW XO31(235)
367 #define MULHWU XO31( 11)
368 #define DIVW XO31(491)
369 #define DIVWU XO31(459)
370 #define CMP XO31( 0)
371 #define CMPL XO31( 32)
372 #define LHBRX XO31(790)
373 #define LWBRX XO31(534)
374 #define STHBRX XO31(918)
375 #define STWBRX XO31(662)
376 #define MFSPR XO31(339)
377 #define MTSPR XO31(467)
378 #define SRAWI XO31(824)
379 #define NEG XO31(104)
380 #define MFCR XO31( 19)
381 #define CNTLZW XO31( 26)
382 #define NOR XO31(124)
383 #define ANDC XO31( 60)
384 #define ORC XO31(412)
385 #define EQV XO31(284)
386 #define NAND XO31(476)
387 #define ISEL XO31( 15)
389 #define LBZX XO31( 87)
390 #define LHZX XO31(279)
391 #define LHAX XO31(343)
392 #define LWZX XO31( 23)
393 #define STBX XO31(215)
394 #define STHX XO31(407)
395 #define STWX XO31(151)
397 #define SPR(a,b) ((((a)<<5)|(b))<<11)
398 #define LR SPR(8, 0)
399 #define CTR SPR(9, 0)
401 #define SLW XO31( 24)
402 #define SRW XO31(536)
403 #define SRAW XO31(792)
405 #define TW XO31(4)
406 #define TRAP (TW | TO (31))
408 #define RT(r) ((r)<<21)
409 #define RS(r) ((r)<<21)
410 #define RA(r) ((r)<<16)
411 #define RB(r) ((r)<<11)
412 #define TO(t) ((t)<<21)
413 #define SH(s) ((s)<<11)
414 #define MB(b) ((b)<<6)
415 #define ME(e) ((e)<<1)
416 #define BO(o) ((o)<<21)
418 #define LK 1
420 #define TAB(t,a,b) (RT(t) | RA(a) | RB(b))
421 #define SAB(s,a,b) (RS(s) | RA(a) | RB(b))
423 #define BF(n) ((n)<<23)
424 #define BI(n, c) (((c)+((n)*4))<<16)
425 #define BT(n, c) (((c)+((n)*4))<<21)
426 #define BA(n, c) (((c)+((n)*4))<<16)
427 #define BB(n, c) (((c)+((n)*4))<<11)
429 #define BO_COND_TRUE BO (12)
430 #define BO_COND_FALSE BO (4)
431 #define BO_ALWAYS BO (20)
433 enum {
434 CR_LT,
435 CR_GT,
436 CR_EQ,
437 CR_SO
440 static const uint32_t tcg_to_bc[] = {
441 [TCG_COND_EQ] = BC | BI (7, CR_EQ) | BO_COND_TRUE,
442 [TCG_COND_NE] = BC | BI (7, CR_EQ) | BO_COND_FALSE,
443 [TCG_COND_LT] = BC | BI (7, CR_LT) | BO_COND_TRUE,
444 [TCG_COND_GE] = BC | BI (7, CR_LT) | BO_COND_FALSE,
445 [TCG_COND_LE] = BC | BI (7, CR_GT) | BO_COND_FALSE,
446 [TCG_COND_GT] = BC | BI (7, CR_GT) | BO_COND_TRUE,
447 [TCG_COND_LTU] = BC | BI (7, CR_LT) | BO_COND_TRUE,
448 [TCG_COND_GEU] = BC | BI (7, CR_LT) | BO_COND_FALSE,
449 [TCG_COND_LEU] = BC | BI (7, CR_GT) | BO_COND_FALSE,
450 [TCG_COND_GTU] = BC | BI (7, CR_GT) | BO_COND_TRUE,
453 static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
455 if (ret != arg) {
456 tcg_out32(s, OR | SAB(arg, ret, arg));
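/* Load a 32-bit constant: values that fit in a signed 16-bit immediate use a
   single ADDI from r0 (which reads as zero in the RA slot); anything else is
   built as ADDIS of the high half followed by ORI of the low half, e.g.
   0x12345678 -> addis ret,0,0x1234 ; ori ret,ret,0x5678. */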
460 static void tcg_out_movi(TCGContext *s, TCGType type,
461 TCGReg ret, tcg_target_long arg)
463 if (arg == (int16_t) arg)
464 tcg_out32 (s, ADDI | RT (ret) | RA (0) | (arg & 0xffff));
465 else {
466 tcg_out32 (s, ADDIS | RT (ret) | RA (0) | ((arg >> 16) & 0xffff));
467 if (arg & 0xffff)
468 tcg_out32 (s, ORI | RS (ret) | RA (ret) | (arg & 0xffff));
472 static void tcg_out_ldst (TCGContext *s, int ret, int addr,
473 int offset, int op1, int op2)
475 if (offset == (int16_t) offset)
476 tcg_out32 (s, op1 | RT (ret) | RA (addr) | (offset & 0xffff));
477 else {
478 tcg_out_movi (s, TCG_TYPE_I32, 0, offset);
479 tcg_out32 (s, op2 | RT (ret) | RA (addr) | RB (0));
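/* Branch (optionally with link) to an arbitrary address: a direct B is used
   when the displacement fits in the 26-bit field; otherwise the target is
   loaded into r0, moved to CTR, and reached via BCCTR. */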
483 static void tcg_out_b (TCGContext *s, int mask, tcg_target_long target)
485 tcg_target_long disp;
487 disp = target - (tcg_target_long) s->code_ptr;
488 if ((disp << 6) >> 6 == disp)
489 tcg_out32 (s, B | (disp & 0x3fffffc) | mask);
490 else {
491 tcg_out_movi (s, TCG_TYPE_I32, 0, (tcg_target_long) target);
492 tcg_out32 (s, MTSPR | RS (0) | CTR);
493 tcg_out32 (s, BCCTR | BO_ALWAYS | mask);
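/* Call a helper.  On AIX the address is a function descriptor: load the entry
   point from word 0 into CTR, the callee's TOC pointer from word 1 into r2,
   and branch through CTR.  Elsewhere a direct branch-and-link suffices for
   constant targets, or MTLR + BCLR for register targets. */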
497 static void tcg_out_call (TCGContext *s, tcg_target_long arg, int const_arg,
498 int lk)
500 #ifdef _CALL_AIX
501 int reg;
503 if (const_arg) {
504 reg = 2;
505 tcg_out_movi (s, TCG_TYPE_I32, reg, arg);
507 else reg = arg;
509 tcg_out32 (s, LWZ | RT (0) | RA (reg));
510 tcg_out32 (s, MTSPR | RA (0) | CTR);
511 tcg_out32 (s, LWZ | RT (2) | RA (reg) | 4);
512 tcg_out32 (s, BCCTR | BO_ALWAYS | lk);
513 #else
514 if (const_arg) {
515 tcg_out_b (s, lk, arg);
517 else {
518 tcg_out32 (s, MTSPR | RS (arg) | LR);
519 tcg_out32 (s, BCLR | BO_ALWAYS | lk);
521 #endif
524 #if defined(CONFIG_SOFTMMU)
526 static void add_qemu_ldst_label (TCGContext *s,
527 int is_ld,
528 TCGMemOp opc,
529 int data_reg,
530 int data_reg2,
531 int addrlo_reg,
532 int addrhi_reg,
533 int mem_index,
534 uint8_t *raddr,
535 uint8_t *label_ptr)
537 TCGLabelQemuLdst *label = new_ldst_label(s);
539 label->is_ld = is_ld;
540 label->opc = opc;
541 label->datalo_reg = data_reg;
542 label->datahi_reg = data_reg2;
543 label->addrlo_reg = addrlo_reg;
544 label->addrhi_reg = addrhi_reg;
545 label->mem_index = mem_index;
546 label->raddr = raddr;
547 label->label_ptr[0] = label_ptr;
550 /* helper signature: helper_ret_ld_mmu(CPUState *env, target_ulong addr,
551 * int mmu_idx, uintptr_t ra)
553 static const void * const qemu_ld_helpers[16] = {
554 [MO_UB] = helper_ret_ldub_mmu,
555 [MO_LEUW] = helper_le_lduw_mmu,
556 [MO_LEUL] = helper_le_ldul_mmu,
557 [MO_LEQ] = helper_le_ldq_mmu,
558 [MO_BEUW] = helper_be_lduw_mmu,
559 [MO_BEUL] = helper_be_ldul_mmu,
560 [MO_BEQ] = helper_be_ldq_mmu,
563 /* helper signature: helper_ret_st_mmu(CPUState *env, target_ulong addr,
564 * uintxx_t val, int mmu_idx, uintptr_t ra)
566 static const void * const qemu_st_helpers[16] = {
567 [MO_UB] = helper_ret_stb_mmu,
568 [MO_LEUW] = helper_le_stw_mmu,
569 [MO_LEUL] = helper_le_stl_mmu,
570 [MO_LEQ] = helper_le_stq_mmu,
571 [MO_BEUW] = helper_be_stw_mmu,
572 [MO_BEUL] = helper_be_stl_mmu,
573 [MO_BEQ] = helper_be_stq_mmu,
576 static void *ld_trampolines[16];
577 static void *st_trampolines[16];
579 /* Perform the TLB load and compare. Branches to the slow path, placing the
580 address of the branch in *LABEL_PTR. Loads the addend of the TLB into R0.
581 Clobbers R1 and R2. */
583 static void tcg_out_tlb_check(TCGContext *s, TCGReg r0, TCGReg r1, TCGReg r2,
584 TCGReg addrlo, TCGReg addrhi, TCGMemOp s_bits,
585 int mem_index, int is_load, uint8_t **label_ptr)
587 int cmp_off =
588 (is_load
589 ? offsetof(CPUArchState, tlb_table[mem_index][0].addr_read)
590 : offsetof(CPUArchState, tlb_table[mem_index][0].addr_write));
591 int add_off = offsetof(CPUArchState, tlb_table[mem_index][0].addend);
592 uint16_t retranst;
593 TCGReg base = TCG_AREG0;
595 /* Extract the page index, shifted into place for tlb index. */
596 tcg_out32(s, (RLWINM
597 | RA(r0)
598 | RS(addrlo)
599 | SH(32 - (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS))
600 | MB(32 - (CPU_TLB_BITS + CPU_TLB_ENTRY_BITS))
601 | ME(31 - CPU_TLB_ENTRY_BITS)));
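/* r0 now holds the byte offset of this page's TLB entry within
   env->tlb_table[mem_index]: the page index wrapped to CPU_TLB_SIZE entries
   and scaled by the entry size, all computed with a single rotate-and-mask. */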
603 /* Compensate for very large offsets. */
604 if (add_off >= 0x8000) {
605 /* Most target env are smaller than 32k; none are larger than 64k.
606 Simplify the logic here merely to offset by 0x7ff0, giving us a
607 range just shy of 64k. Check this assumption. */
608 QEMU_BUILD_BUG_ON(offsetof(CPUArchState,
609 tlb_table[NB_MMU_MODES - 1][1])
610 > 0x7ff0 + 0x7fff);
611 tcg_out32(s, ADDI | RT(r1) | RA(base) | 0x7ff0);
612 base = r1;
613 cmp_off -= 0x7ff0;
614 add_off -= 0x7ff0;
617 /* Clear the non-page, non-alignment bits from the address. */
618 tcg_out32(s, (RLWINM
619 | RA(r2)
620 | RS(addrlo)
621 | SH(0)
622 | MB((32 - s_bits) & 31)
623 | ME(31 - TARGET_PAGE_BITS)));
625 tcg_out32(s, ADD | RT(r0) | RA(r0) | RB(base));
626 base = r0;
628 /* Load the tlb comparator. */
629 tcg_out32(s, LWZ | RT(r1) | RA(base) | (cmp_off & 0xffff));
631 tcg_out32(s, CMP | BF(7) | RA(r2) | RB(r1));
633 if (TARGET_LONG_BITS == 64) {
634 tcg_out32(s, LWZ | RT(r1) | RA(base) | ((cmp_off + 4) & 0xffff));
637 /* Load the tlb addend for use on the fast path.
638 Do this asap to minimize load delay. */
639 tcg_out32(s, LWZ | RT(r0) | RA(base) | (add_off & 0xffff));
641 if (TARGET_LONG_BITS == 64) {
642 tcg_out32(s, CMP | BF(6) | RA(addrhi) | RB(r1));
643 tcg_out32(s, CRAND | BT(7, CR_EQ) | BA(6, CR_EQ) | BB(7, CR_EQ));
646 /* Use a conditional branch-and-link so that we load a pointer to
647 somewhere within the current opcode, for passing on to the helper.
648 This address cannot be used for a tail call, but it's shorter
649 than forming an address from scratch. */
650 *label_ptr = s->code_ptr;
651 retranst = ((uint16_t *) s->code_ptr)[1] & ~3;
652 tcg_out32(s, BC | BI(7, CR_EQ) | retranst | BO_COND_FALSE | LK);
654 #endif
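/* qemu_ld/qemu_st operand layout consumed below: data low, optional data high
   for 64-bit values, address low, optional address high when
   TARGET_LONG_BITS == 64, the TCGMemOp, and (with CONFIG_SOFTMMU) the mmu
   index. */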
656 static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is64)
658 TCGReg addrlo, datalo, datahi, rbase, addrhi __attribute__((unused));
659 TCGMemOp opc, bswap;
660 #ifdef CONFIG_SOFTMMU
661 int mem_index;
662 uint8_t *label_ptr;
663 #endif
665 datalo = *args++;
666 datahi = (is64 ? *args++ : 0);
667 addrlo = *args++;
668 addrhi = (TARGET_LONG_BITS == 64 ? *args++ : 0);
669 opc = *args++;
670 bswap = opc & MO_BSWAP;
672 #ifdef CONFIG_SOFTMMU
673 mem_index = *args;
674 tcg_out_tlb_check(s, TCG_REG_R3, TCG_REG_R4, TCG_REG_R0, addrlo,
675 addrhi, opc & MO_SIZE, mem_index, 0, &label_ptr);
676 rbase = TCG_REG_R3;
677 #else /* !CONFIG_SOFTMMU */
678 rbase = GUEST_BASE ? TCG_GUEST_BASE_REG : 0;
679 #endif
681 switch (opc & MO_SSIZE) {
682 default:
683 case MO_UB:
684 tcg_out32(s, LBZX | TAB(datalo, rbase, addrlo));
685 break;
686 case MO_SB:
687 tcg_out32(s, LBZX | TAB(datalo, rbase, addrlo));
688 tcg_out32(s, EXTSB | RA(datalo) | RS(datalo));
689 break;
690 case MO_UW:
691 tcg_out32(s, (bswap ? LHBRX : LHZX) | TAB(datalo, rbase, addrlo));
692 break;
693 case MO_SW:
694 if (bswap) {
695 tcg_out32(s, LHBRX | TAB(datalo, rbase, addrlo));
696 tcg_out32(s, EXTSH | RA(datalo) | RS(datalo));
697 } else {
698 tcg_out32(s, LHAX | TAB(datalo, rbase, addrlo));
700 break;
701 case MO_UL:
702 tcg_out32(s, (bswap ? LWBRX : LWZX) | TAB(datalo, rbase, addrlo));
703 break;
704 case MO_Q:
705 if (bswap) {
706 tcg_out32(s, ADDI | RT(TCG_REG_R0) | RA(addrlo) | 4);
707 tcg_out32(s, LWBRX | TAB(datalo, rbase, addrlo));
708 tcg_out32(s, LWBRX | TAB(datahi, rbase, TCG_REG_R0));
709 } else if (rbase != 0) {
710 tcg_out32(s, ADDI | RT(TCG_REG_R0) | RA(addrlo) | 4);
711 tcg_out32(s, LWZX | TAB(datahi, rbase, addrlo));
712 tcg_out32(s, LWZX | TAB(datalo, rbase, TCG_REG_R0));
713 } else if (addrlo == datahi) {
714 tcg_out32(s, LWZ | RT(datalo) | RA(addrlo) | 4);
715 tcg_out32(s, LWZ | RT(datahi) | RA(addrlo));
716 } else {
717 tcg_out32(s, LWZ | RT(datahi) | RA(addrlo));
718 tcg_out32(s, LWZ | RT(datalo) | RA(addrlo) | 4);
720 break;
722 #ifdef CONFIG_SOFTMMU
723 add_qemu_ldst_label(s, 1, opc, datalo, datahi, addrlo,
724 addrhi, mem_index, s->code_ptr, label_ptr);
725 #endif
728 static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is64)
730 TCGReg addrlo, datalo, datahi, rbase, addrhi __attribute__((unused));
731 TCGMemOp opc, bswap, s_bits;
732 #ifdef CONFIG_SOFTMMU
733 int mem_index;
734 uint8_t *label_ptr;
735 #endif
737 datalo = *args++;
738 datahi = (is64 ? *args++ : 0);
739 addrlo = *args++;
740 addrhi = (TARGET_LONG_BITS == 64 ? *args++ : 0);
741 opc = *args++;
742 bswap = opc & MO_BSWAP;
743 s_bits = opc & MO_SIZE;
745 #ifdef CONFIG_SOFTMMU
746 mem_index = *args;
747 tcg_out_tlb_check(s, TCG_REG_R3, TCG_REG_R4, TCG_REG_R0, addrlo,
748 addrhi, s_bits, mem_index, 0, &label_ptr);
749 rbase = TCG_REG_R3;
750 #else /* !CONFIG_SOFTMMU */
751 rbase = GUEST_BASE ? TCG_GUEST_BASE_REG : 0;
752 #endif
754 switch (s_bits) {
755 case MO_8:
756 tcg_out32(s, STBX | SAB(datalo, rbase, addrlo));
757 break;
758 case MO_16:
759 tcg_out32(s, (bswap ? STHBRX : STHX) | SAB(datalo, rbase, addrlo));
760 break;
761 case MO_32:
762 default:
763 tcg_out32(s, (bswap ? STWBRX : STWX) | SAB(datalo, rbase, addrlo));
764 break;
765 case MO_64:
766 if (bswap) {
767 tcg_out32(s, ADDI | RT(TCG_REG_R0) | RA(addrlo) | 4);
768 tcg_out32(s, STWBRX | SAB(datalo, rbase, addrlo));
769 tcg_out32(s, STWBRX | SAB(datahi, rbase, TCG_REG_R0));
770 } else if (rbase != 0) {
771 tcg_out32(s, ADDI | RT(TCG_REG_R0) | RA(addrlo) | 4);
772 tcg_out32(s, STWX | SAB(datahi, rbase, addrlo));
773 tcg_out32(s, STWX | SAB(datalo, rbase, TCG_REG_R0));
774 } else {
775 tcg_out32(s, STW | RS(datahi) | RA(addrlo));
776 tcg_out32(s, STW | RS(datalo) | RA(addrlo) | 4);
778 break;
781 #ifdef CONFIG_SOFTMMU
782 add_qemu_ldst_label(s, 0, opc, datalo, datahi, addrlo, addrhi,
783 mem_index, s->code_ptr, label_ptr);
784 #endif
787 #if defined(CONFIG_SOFTMMU)
788 static void tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
790 TCGReg ir, datalo, datahi;
791 TCGMemOp opc = l->opc;
793 reloc_pc14 (l->label_ptr[0], (uintptr_t)s->code_ptr);
795 ir = TCG_REG_R4;
796 if (TARGET_LONG_BITS == 32) {
797 tcg_out_mov(s, TCG_TYPE_I32, ir++, l->addrlo_reg);
798 } else {
799 #ifdef TCG_TARGET_CALL_ALIGN_ARGS
800 ir |= 1;
801 #endif
802 tcg_out_mov(s, TCG_TYPE_I32, ir++, l->addrhi_reg);
803 tcg_out_mov(s, TCG_TYPE_I32, ir++, l->addrlo_reg);
805 tcg_out_movi(s, TCG_TYPE_I32, ir++, l->mem_index);
806 tcg_out32(s, MFSPR | RT(ir++) | LR);
807 tcg_out_b(s, LK, (uintptr_t)ld_trampolines[opc & ~MO_SIGN]);
809 datalo = l->datalo_reg;
810 switch (opc & MO_SSIZE) {
811 case MO_SB:
812 tcg_out32(s, EXTSB | RA(datalo) | RS(TCG_REG_R3));
813 break;
814 case MO_SW:
815 tcg_out32(s, EXTSH | RA(datalo) | RS(TCG_REG_R3));
816 break;
817 default:
818 tcg_out_mov(s, TCG_TYPE_I32, datalo, TCG_REG_R3);
819 break;
820 case MO_Q:
821 datahi = l->datahi_reg;
822 if (datalo != TCG_REG_R3) {
823 tcg_out_mov(s, TCG_TYPE_I32, datalo, TCG_REG_R4);
824 tcg_out_mov(s, TCG_TYPE_I32, datahi, TCG_REG_R3);
825 } else if (datahi != TCG_REG_R4) {
826 tcg_out_mov(s, TCG_TYPE_I32, datahi, TCG_REG_R3);
827 tcg_out_mov(s, TCG_TYPE_I32, datalo, TCG_REG_R4);
828 } else {
829 tcg_out_mov(s, TCG_TYPE_I32, TCG_REG_R0, TCG_REG_R4);
830 tcg_out_mov(s, TCG_TYPE_I32, datahi, TCG_REG_R3);
831 tcg_out_mov(s, TCG_TYPE_I32, datalo, TCG_REG_R0);
833 break;
835 tcg_out_b (s, 0, (uintptr_t)l->raddr);
838 static void tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
840 TCGReg ir, datalo;
841 TCGMemOp opc = l->opc;
843 reloc_pc14 (l->label_ptr[0], (tcg_target_long) s->code_ptr);
845 ir = TCG_REG_R4;
846 if (TARGET_LONG_BITS == 32) {
847 tcg_out_mov (s, TCG_TYPE_I32, ir++, l->addrlo_reg);
848 } else {
849 #ifdef TCG_TARGET_CALL_ALIGN_ARGS
850 ir |= 1;
851 #endif
852 tcg_out_mov (s, TCG_TYPE_I32, ir++, l->addrhi_reg);
853 tcg_out_mov (s, TCG_TYPE_I32, ir++, l->addrlo_reg);
856 datalo = l->datalo_reg;
857 switch (opc & MO_SIZE) {
858 case MO_8:
859 tcg_out32(s, (RLWINM | RA (ir) | RS (datalo)
860 | SH (0) | MB (24) | ME (31)));
861 break;
862 case MO_16:
863 tcg_out32(s, (RLWINM | RA (ir) | RS (datalo)
864 | SH (0) | MB (16) | ME (31)));
865 break;
866 default:
867 tcg_out_mov(s, TCG_TYPE_I32, ir, datalo);
868 break;
869 case MO_64:
870 #ifdef TCG_TARGET_CALL_ALIGN_ARGS
871 ir |= 1;
872 #endif
873 tcg_out_mov(s, TCG_TYPE_I32, ir++, l->datahi_reg);
874 tcg_out_mov(s, TCG_TYPE_I32, ir, datalo);
875 break;
877 ir++;
879 tcg_out_movi(s, TCG_TYPE_I32, ir++, l->mem_index);
880 tcg_out32(s, MFSPR | RT(ir++) | LR);
881 tcg_out_b(s, LK, (uintptr_t)st_trampolines[opc]);
882 tcg_out_b(s, 0, (uintptr_t)l->raddr);
884 #endif
886 #ifdef CONFIG_SOFTMMU
887 static void emit_ldst_trampoline (TCGContext *s, const void *ptr)
889 tcg_out_mov (s, TCG_TYPE_I32, 3, TCG_AREG0);
890 tcg_out_call (s, (tcg_target_long) ptr, 1, 0);
892 #endif
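/* Prologue/epilogue.  The frame allocated below holds, from the stack pointer
   upwards: the ABI linkage area, TCG_STATIC_CALL_ARGS_SIZE bytes for outgoing
   helper arguments, the callee-saved register save area, and the TCG
   temporary buffer, with the total rounded up to a 16-byte multiple. */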
894 static void tcg_target_qemu_prologue (TCGContext *s)
896 int i, frame_size;
898 frame_size = 0
899 + LINKAGE_AREA_SIZE
900 + TCG_STATIC_CALL_ARGS_SIZE
901 + ARRAY_SIZE (tcg_target_callee_save_regs) * 4
902 + CPU_TEMP_BUF_NLONGS * sizeof(long)
904 frame_size = (frame_size + 15) & ~15;
906 tcg_set_frame(s, TCG_REG_CALL_STACK, frame_size
907 - CPU_TEMP_BUF_NLONGS * sizeof(long),
908 CPU_TEMP_BUF_NLONGS * sizeof(long));
910 #ifdef _CALL_AIX
912 uint32_t addr;
914 /* First emit adhoc function descriptor */
915 addr = (uint32_t) s->code_ptr + 12;
916 tcg_out32 (s, addr); /* entry point */
917 s->code_ptr += 8; /* skip TOC and environment pointer */
919 #endif
920 tcg_out32 (s, MFSPR | RT (0) | LR);
921 tcg_out32 (s, STWU | RS (1) | RA (1) | (-frame_size & 0xffff));
922 for (i = 0; i < ARRAY_SIZE (tcg_target_callee_save_regs); ++i)
923 tcg_out32 (s, (STW
924 | RS (tcg_target_callee_save_regs[i])
925 | RA (1)
926 | (i * 4 + LINKAGE_AREA_SIZE + TCG_STATIC_CALL_ARGS_SIZE)
929 tcg_out32 (s, STW | RS (0) | RA (1) | (frame_size + LR_OFFSET));
931 #ifdef CONFIG_USE_GUEST_BASE
932 if (GUEST_BASE) {
933 tcg_out_movi (s, TCG_TYPE_I32, TCG_GUEST_BASE_REG, GUEST_BASE);
934 tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
936 #endif
938 tcg_out_mov (s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
939 tcg_out32 (s, MTSPR | RS (tcg_target_call_iarg_regs[1]) | CTR);
940 tcg_out32 (s, BCCTR | BO_ALWAYS);
941 tb_ret_addr = s->code_ptr;
943 for (i = 0; i < ARRAY_SIZE (tcg_target_callee_save_regs); ++i)
944 tcg_out32 (s, (LWZ
945 | RT (tcg_target_callee_save_regs[i])
946 | RA (1)
947 | (i * 4 + LINKAGE_AREA_SIZE + TCG_STATIC_CALL_ARGS_SIZE)
950 tcg_out32 (s, LWZ | RT (0) | RA (1) | (frame_size + LR_OFFSET));
951 tcg_out32 (s, MTSPR | RS (0) | LR);
952 tcg_out32 (s, ADDI | RT (1) | RA (1) | frame_size);
953 tcg_out32 (s, BCLR | BO_ALWAYS);
955 #ifdef CONFIG_SOFTMMU
956 for (i = 0; i < 16; ++i) {
957 if (qemu_ld_helpers[i]) {
958 ld_trampolines[i] = s->code_ptr;
959 emit_ldst_trampoline(s, qemu_ld_helpers[i]);
961 if (qemu_st_helpers[i]) {
962 st_trampolines[i] = s->code_ptr;
963 emit_ldst_trampoline(s, qemu_st_helpers[i]);
966 #endif
969 static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
970 intptr_t arg2)
972 tcg_out_ldst (s, ret, arg1, arg2, LWZ, LWZX);
975 static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
976 intptr_t arg2)
978 tcg_out_ldst (s, arg, arg1, arg2, STW, STWX);
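/* Add a constant to a register.  Values outside the signed 16-bit range are
   split into ADDIS + ADDI; since ADDI sign-extends its immediate, the high
   half is incremented whenever bit 15 of the constant is set, e.g.
   0x12348000 -> addis rt,ra,0x1235 ; addi rt,rt,-0x8000. */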
981 static void ppc_addi (TCGContext *s, int rt, int ra, tcg_target_long si)
983 if (!si && rt == ra)
984 return;
986 if (si == (int16_t) si)
987 tcg_out32 (s, ADDI | RT (rt) | RA (ra) | (si & 0xffff));
988 else {
989 uint16_t h = ((si >> 16) & 0xffff) + ((uint16_t) si >> 15);
990 tcg_out32 (s, ADDIS | RT (rt) | RA (ra) | h);
991 tcg_out32 (s, ADDI | RT (rt) | RA (rt) | (si & 0xffff));
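/* Emit a comparison into CR field 'cr'.  EQ/NE may use either form; signed
   ordered conditions use CMPI/CMP with a signed 16-bit immediate, unsigned
   conditions use CMPLI/CMPL with an unsigned one.  Immediates that do not
   fit are first loaded into r0. */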
995 static void tcg_out_cmp (TCGContext *s, int cond, TCGArg arg1, TCGArg arg2,
996 int const_arg2, int cr)
998 int imm;
999 uint32_t op;
1001 switch (cond) {
1002 case TCG_COND_EQ:
1003 case TCG_COND_NE:
1004 if (const_arg2) {
1005 if ((int16_t) arg2 == arg2) {
1006 op = CMPI;
1007 imm = 1;
1008 break;
1010 else if ((uint16_t) arg2 == arg2) {
1011 op = CMPLI;
1012 imm = 1;
1013 break;
1016 op = CMPL;
1017 imm = 0;
1018 break;
1020 case TCG_COND_LT:
1021 case TCG_COND_GE:
1022 case TCG_COND_LE:
1023 case TCG_COND_GT:
1024 if (const_arg2) {
1025 if ((int16_t) arg2 == arg2) {
1026 op = CMPI;
1027 imm = 1;
1028 break;
1031 op = CMP;
1032 imm = 0;
1033 break;
1035 case TCG_COND_LTU:
1036 case TCG_COND_GEU:
1037 case TCG_COND_LEU:
1038 case TCG_COND_GTU:
1039 if (const_arg2) {
1040 if ((uint16_t) arg2 == arg2) {
1041 op = CMPLI;
1042 imm = 1;
1043 break;
1046 op = CMPL;
1047 imm = 0;
1048 break;
1050 default:
1051 tcg_abort ();
1053 op |= BF (cr);
1055 if (imm)
1056 tcg_out32 (s, op | RA (arg1) | (arg2 & 0xffff));
1057 else {
1058 if (const_arg2) {
1059 tcg_out_movi (s, TCG_TYPE_I32, 0, arg2);
1060 tcg_out32 (s, op | RA (arg1) | RB (0));
1062 else
1063 tcg_out32 (s, op | RA (arg1) | RB (arg2));
1068 static void tcg_out_bc (TCGContext *s, int bc, int label_index)
1070 TCGLabel *l = &s->labels[label_index];
1072 if (l->has_value)
1073 tcg_out32 (s, bc | reloc_pc14_val (s->code_ptr, l->u.value));
1074 else {
1075 uint16_t val = *(uint16_t *) &s->code_ptr[2];
1077 /* Thanks to Andrzej Zaborowski */
1078 tcg_out32 (s, bc | (val & 0xfffc));
1079 tcg_out_reloc (s, s->code_ptr - 4, R_PPC_REL14, label_index, 0);
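/* Compute a double-word comparison result into CR7[EQ].  EQ/NE compare both
   halves and combine the EQ bits with CRAND/CRNAND; the ordered conditions
   compare the high halves with the condition's own signedness and the low
   halves unsigned, then merge the CR bits so that CR7[EQ] ends up set exactly
   when the 64-bit condition holds. */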
1083 static void tcg_out_cr7eq_from_cond (TCGContext *s, const TCGArg *args,
1084 const int *const_args)
1086 TCGCond cond = args[4];
1087 int op;
1088 struct { int bit1; int bit2; int cond2; } bits[] = {
1089 [TCG_COND_LT ] = { CR_LT, CR_LT, TCG_COND_LT },
1090 [TCG_COND_LE ] = { CR_LT, CR_GT, TCG_COND_LT },
1091 [TCG_COND_GT ] = { CR_GT, CR_GT, TCG_COND_GT },
1092 [TCG_COND_GE ] = { CR_GT, CR_LT, TCG_COND_GT },
1093 [TCG_COND_LTU] = { CR_LT, CR_LT, TCG_COND_LTU },
1094 [TCG_COND_LEU] = { CR_LT, CR_GT, TCG_COND_LTU },
1095 [TCG_COND_GTU] = { CR_GT, CR_GT, TCG_COND_GTU },
1096 [TCG_COND_GEU] = { CR_GT, CR_LT, TCG_COND_GTU },
1097 }, *b = &bits[cond];
1099 switch (cond) {
1100 case TCG_COND_EQ:
1101 case TCG_COND_NE:
1102 op = (cond == TCG_COND_EQ) ? CRAND : CRNAND;
1103 tcg_out_cmp (s, cond, args[0], args[2], const_args[2], 6);
1104 tcg_out_cmp (s, cond, args[1], args[3], const_args[3], 7);
1105 tcg_out32 (s, op | BT (7, CR_EQ) | BA (6, CR_EQ) | BB (7, CR_EQ));
1106 break;
1107 case TCG_COND_LT:
1108 case TCG_COND_LE:
1109 case TCG_COND_GT:
1110 case TCG_COND_GE:
1111 case TCG_COND_LTU:
1112 case TCG_COND_LEU:
1113 case TCG_COND_GTU:
1114 case TCG_COND_GEU:
1115 op = (b->bit1 != b->bit2) ? CRANDC : CRAND;
1116 tcg_out_cmp (s, b->cond2, args[1], args[3], const_args[3], 5);
1117 tcg_out_cmp (s, tcg_unsigned_cond (cond), args[0], args[2],
1118 const_args[2], 7);
1119 tcg_out32 (s, op | BT (7, CR_EQ) | BA (5, CR_EQ) | BB (7, b->bit2));
1120 tcg_out32 (s, CROR | BT (7, CR_EQ) | BA (5, b->bit1) | BB (7, CR_EQ));
1121 break;
1122 default:
1123 tcg_abort();
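/* setcond: EQ uses the count-leading-zeros trick (cntlzw of x is 32 only for
   x == 0, so cntlzw(arg1 ^ arg2) >> 5 is the result); NE uses the ADDIC/SUBFE
   carry trick, which yields 1 for any non-zero difference; the remaining
   conditions do a real compare into CR7 and extract the relevant bit with
   MFCR + RLWINM. */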
1127 static void tcg_out_setcond (TCGContext *s, TCGCond cond, TCGArg arg0,
1128 TCGArg arg1, TCGArg arg2, int const_arg2)
1130 int crop, sh, arg;
1132 switch (cond) {
1133 case TCG_COND_EQ:
1134 if (const_arg2) {
1135 if (!arg2) {
1136 arg = arg1;
1138 else {
1139 arg = 0;
1140 if ((uint16_t) arg2 == arg2) {
1141 tcg_out32 (s, XORI | RS (arg1) | RA (0) | arg2);
1143 else {
1144 tcg_out_movi (s, TCG_TYPE_I32, 0, arg2);
1145 tcg_out32 (s, XOR | SAB (arg1, 0, 0));
1149 else {
1150 arg = 0;
1151 tcg_out32 (s, XOR | SAB (arg1, 0, arg2));
1153 tcg_out32 (s, CNTLZW | RS (arg) | RA (0));
1154 tcg_out32 (s, (RLWINM
1155 | RA (arg0)
1156 | RS (0)
1157 | SH (27)
1158 | MB (5)
1159 | ME (31)
1162 break;
1164 case TCG_COND_NE:
1165 if (const_arg2) {
1166 if (!arg2) {
1167 arg = arg1;
1169 else {
1170 arg = 0;
1171 if ((uint16_t) arg2 == arg2) {
1172 tcg_out32 (s, XORI | RS (arg1) | RA (0) | arg2);
1174 else {
1175 tcg_out_movi (s, TCG_TYPE_I32, 0, arg2);
1176 tcg_out32 (s, XOR | SAB (arg1, 0, 0));
1180 else {
1181 arg = 0;
1182 tcg_out32 (s, XOR | SAB (arg1, 0, arg2));
1185 if (arg == arg1 && arg1 == arg0) {
1186 tcg_out32 (s, ADDIC | RT (0) | RA (arg) | 0xffff);
1187 tcg_out32 (s, SUBFE | TAB (arg0, 0, arg));
1189 else {
1190 tcg_out32 (s, ADDIC | RT (arg0) | RA (arg) | 0xffff);
1191 tcg_out32 (s, SUBFE | TAB (arg0, arg0, arg));
1193 break;
1195 case TCG_COND_GT:
1196 case TCG_COND_GTU:
1197 sh = 30;
1198 crop = 0;
1199 goto crtest;
1201 case TCG_COND_LT:
1202 case TCG_COND_LTU:
1203 sh = 29;
1204 crop = 0;
1205 goto crtest;
1207 case TCG_COND_GE:
1208 case TCG_COND_GEU:
1209 sh = 31;
1210 crop = CRNOR | BT (7, CR_EQ) | BA (7, CR_LT) | BB (7, CR_LT);
1211 goto crtest;
1213 case TCG_COND_LE:
1214 case TCG_COND_LEU:
1215 sh = 31;
1216 crop = CRNOR | BT (7, CR_EQ) | BA (7, CR_GT) | BB (7, CR_GT);
1217 crtest:
1218 tcg_out_cmp (s, cond, arg1, arg2, const_arg2, 7);
1219 if (crop) tcg_out32 (s, crop);
1220 tcg_out32 (s, MFCR | RT (0));
1221 tcg_out32 (s, (RLWINM
1222 | RA (arg0)
1223 | RS (0)
1224 | SH (sh)
1225 | MB (31)
1226 | ME (31)
1229 break;
1231 default:
1232 tcg_abort ();
1236 static void tcg_out_setcond2 (TCGContext *s, const TCGArg *args,
1237 const int *const_args)
1239 tcg_out_cr7eq_from_cond (s, args + 1, const_args + 1);
1240 tcg_out32 (s, MFCR | RT (0));
1241 tcg_out32 (s, (RLWINM
1242 | RA (args[0])
1243 | RS (0)
1244 | SH (31)
1245 | MB (31)
1246 | ME (31)
1251 static void tcg_out_movcond (TCGContext *s, TCGCond cond,
1252 TCGArg dest,
1253 TCGArg c1, TCGArg c2,
1254 TCGArg v1, TCGArg v2,
1255 int const_c2)
1257 tcg_out_cmp (s, cond, c1, c2, const_c2, 7);
1259 if (1) {
1260 /* At least here on 7447A bit twiddling hacks are outperformed
1261 by jumpy code (the testing was not scientific) */
1262 if (dest == v2) {
1263 cond = tcg_invert_cond (cond);
1264 v2 = v1;
1266 else {
1267 if (dest != v1) {
1268 tcg_out_mov (s, TCG_TYPE_I32, dest, v1);
1271 /* Branch forward over one insn */
1272 tcg_out32 (s, tcg_to_bc[cond] | 8);
1273 tcg_out_mov (s, TCG_TYPE_I32, dest, v2);
1275 else {
1276 /* isel version, "if (1)" above should be replaced once a way
1277 to figure out availability of isel on the underlying
1278 hardware is found */
1279 int tab, bc;
1281 switch (cond) {
1282 case TCG_COND_EQ:
1283 tab = TAB (dest, v1, v2);
1284 bc = CR_EQ;
1285 break;
1286 case TCG_COND_NE:
1287 tab = TAB (dest, v2, v1);
1288 bc = CR_EQ;
1289 break;
1290 case TCG_COND_LTU:
1291 case TCG_COND_LT:
1292 tab = TAB (dest, v1, v2);
1293 bc = CR_LT;
1294 break;
1295 case TCG_COND_GEU:
1296 case TCG_COND_GE:
1297 tab = TAB (dest, v2, v1);
1298 bc = CR_LT;
1299 break;
1300 case TCG_COND_LEU:
1301 case TCG_COND_LE:
1302 tab = TAB (dest, v2, v1);
1303 bc = CR_GT;
1304 break;
1305 case TCG_COND_GTU:
1306 case TCG_COND_GT:
1307 tab = TAB (dest, v1, v2);
1308 bc = CR_GT;
1309 break;
1310 default:
1311 tcg_abort ();
1313 tcg_out32 (s, ISEL | tab | ((bc + 28) << 6));
1317 static void tcg_out_brcond (TCGContext *s, TCGCond cond,
1318 TCGArg arg1, TCGArg arg2, int const_arg2,
1319 int label_index)
1321 tcg_out_cmp (s, cond, arg1, arg2, const_arg2, 7);
1322 tcg_out_bc (s, tcg_to_bc[cond], label_index);
1325 /* XXX: we implement it at the target level to avoid having to
1326 handle cross-basic-block temporaries */
1327 static void tcg_out_brcond2 (TCGContext *s, const TCGArg *args,
1328 const int *const_args)
1330 tcg_out_cr7eq_from_cond (s, args, const_args);
1331 tcg_out_bc (s, (BC | BI (7, CR_EQ) | BO_COND_TRUE), args[5]);
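/* Patch the 16-byte slot reserved by INDEX_op_goto_tb (direct jump method).
   Far targets are rewritten as lis/ori/mtctr/bctr; near targets as a single
   relative branch; a displacement of exactly 16 means "fall through past the
   slot", which is expressed as four nops. */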
1334 void ppc_tb_set_jmp_target (unsigned long jmp_addr, unsigned long addr)
1336 uint32_t *ptr;
1337 long disp = addr - jmp_addr;
1338 unsigned long patch_size;
1340 ptr = (uint32_t *)jmp_addr;
1342 if ((disp << 6) >> 6 != disp) {
1343 ptr[0] = 0x3c000000 | (addr >> 16); /* lis 0,addr@h */
1344 ptr[1] = 0x60000000 | (addr & 0xffff); /* ori 0,0,addr@l */
1345 ptr[2] = 0x7c0903a6; /* mtctr 0 */
1346 ptr[3] = 0x4e800420; /* bctr */
1347 patch_size = 16;
1348 } else {
1349 /* patch the branch destination */
1350 if (disp != 16) {
1351 *ptr = 0x48000000 | (disp & 0x03fffffc); /* b disp */
1352 patch_size = 4;
1353 } else {
1354 ptr[0] = 0x60000000; /* nop */
1355 ptr[1] = 0x60000000;
1356 ptr[2] = 0x60000000;
1357 ptr[3] = 0x60000000;
1358 patch_size = 16;
1361 /* flush icache */
1362 flush_icache_range(jmp_addr, jmp_addr + patch_size);
1365 static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
1366 const int *const_args)
1368 switch (opc) {
1369 case INDEX_op_exit_tb:
1370 tcg_out_movi (s, TCG_TYPE_I32, TCG_REG_R3, args[0]);
1371 tcg_out_b (s, 0, (tcg_target_long) tb_ret_addr);
1372 break;
1373 case INDEX_op_goto_tb:
1374 if (s->tb_jmp_offset) {
1375 /* direct jump method */
1377 s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
1378 s->code_ptr += 16;
1380 else {
1381 tcg_abort ();
1383 s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
1384 break;
1385 case INDEX_op_br:
1387 TCGLabel *l = &s->labels[args[0]];
1389 if (l->has_value) {
1390 tcg_out_b (s, 0, l->u.value);
1392 else {
1393 uint32_t val = *(uint32_t *) s->code_ptr;
1395 /* Thanks to Andrzej Zaborowski */
1396 tcg_out32 (s, B | (val & 0x3fffffc));
1397 tcg_out_reloc (s, s->code_ptr - 4, R_PPC_REL24, args[0], 0);
1400 break;
1401 case INDEX_op_call:
1402 tcg_out_call (s, args[0], const_args[0], LK);
1403 break;
1404 case INDEX_op_movi_i32:
1405 tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
1406 break;
1407 case INDEX_op_ld8u_i32:
1408 tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX);
1409 break;
1410 case INDEX_op_ld8s_i32:
1411 tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX);
1412 tcg_out32 (s, EXTSB | RS (args[0]) | RA (args[0]));
1413 break;
1414 case INDEX_op_ld16u_i32:
1415 tcg_out_ldst (s, args[0], args[1], args[2], LHZ, LHZX);
1416 break;
1417 case INDEX_op_ld16s_i32:
1418 tcg_out_ldst (s, args[0], args[1], args[2], LHA, LHAX);
1419 break;
1420 case INDEX_op_ld_i32:
1421 tcg_out_ldst (s, args[0], args[1], args[2], LWZ, LWZX);
1422 break;
1423 case INDEX_op_st8_i32:
1424 tcg_out_ldst (s, args[0], args[1], args[2], STB, STBX);
1425 break;
1426 case INDEX_op_st16_i32:
1427 tcg_out_ldst (s, args[0], args[1], args[2], STH, STHX);
1428 break;
1429 case INDEX_op_st_i32:
1430 tcg_out_ldst (s, args[0], args[1], args[2], STW, STWX);
1431 break;
1433 case INDEX_op_add_i32:
1434 if (const_args[2])
1435 ppc_addi (s, args[0], args[1], args[2]);
1436 else
1437 tcg_out32 (s, ADD | TAB (args[0], args[1], args[2]));
1438 break;
1439 case INDEX_op_sub_i32:
1440 if (const_args[2])
1441 ppc_addi (s, args[0], args[1], -args[2]);
1442 else
1443 tcg_out32 (s, SUBF | TAB (args[0], args[2], args[1]));
1444 break;
1446 case INDEX_op_and_i32:
1447 if (const_args[2]) {
1448 uint32_t c;
1450 c = args[2];
1452 if (!c) {
1453 tcg_out_movi (s, TCG_TYPE_I32, args[0], 0);
1454 break;
1456 #ifdef __PPU__
1457 uint32_t t, n;
1458 int mb, me;
1460 n = c ^ -(c & 1);
1461 t = n + (n & -n);
1463 if ((t & (t - 1)) == 0) {
1464 int lzc, tzc;
1466 if ((c & 0x80000001) == 0x80000001) {
1467 lzc = clz32 (n);
1468 tzc = ctz32 (n);
1470 mb = 32 - tzc;
1471 me = lzc - 1;
1473 else {
1474 lzc = clz32 (c);
1475 tzc = ctz32 (c);
1477 mb = lzc;
1478 me = 31 - tzc;
1481 tcg_out32 (s, (RLWINM
1482 | RA (args[0])
1483 | RS (args[1])
1484 | SH (0)
1485 | MB (mb)
1486 | ME (me)
1490 else
1491 #endif /* !__PPU__ */
1493 if ((c & 0xffff) == c)
1494 tcg_out32 (s, ANDI | RS (args[1]) | RA (args[0]) | c);
1495 else if ((c & 0xffff0000) == c)
1496 tcg_out32 (s, ANDIS | RS (args[1]) | RA (args[0])
1497 | ((c >> 16) & 0xffff));
1498 else {
1499 tcg_out_movi (s, TCG_TYPE_I32, 0, c);
1500 tcg_out32 (s, AND | SAB (args[1], args[0], 0));
1504 else
1505 tcg_out32 (s, AND | SAB (args[1], args[0], args[2]));
1506 break;
1507 case INDEX_op_or_i32:
1508 if (const_args[2]) {
1509 if (args[2] & 0xffff) {
1510 tcg_out32 (s, ORI | RS (args[1]) | RA (args[0])
1511 | (args[2] & 0xffff));
1512 if (args[2] >> 16)
1513 tcg_out32 (s, ORIS | RS (args[0]) | RA (args[0])
1514 | ((args[2] >> 16) & 0xffff));
1516 else {
1517 tcg_out32 (s, ORIS | RS (args[1]) | RA (args[0])
1518 | ((args[2] >> 16) & 0xffff));
1521 else
1522 tcg_out32 (s, OR | SAB (args[1], args[0], args[2]));
1523 break;
1524 case INDEX_op_xor_i32:
1525 if (const_args[2]) {
1526 if ((args[2] & 0xffff) == args[2])
1527 tcg_out32 (s, XORI | RS (args[1]) | RA (args[0])
1528 | (args[2] & 0xffff));
1529 else if ((args[2] & 0xffff0000) == args[2])
1530 tcg_out32 (s, XORIS | RS (args[1]) | RA (args[0])
1531 | ((args[2] >> 16) & 0xffff));
1532 else {
1533 tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
1534 tcg_out32 (s, XOR | SAB (args[1], args[0], 0));
1537 else
1538 tcg_out32 (s, XOR | SAB (args[1], args[0], args[2]));
1539 break;
1540 case INDEX_op_andc_i32:
1541 tcg_out32 (s, ANDC | SAB (args[1], args[0], args[2]));
1542 break;
1543 case INDEX_op_orc_i32:
1544 tcg_out32 (s, ORC | SAB (args[1], args[0], args[2]));
1545 break;
1546 case INDEX_op_eqv_i32:
1547 tcg_out32 (s, EQV | SAB (args[1], args[0], args[2]));
1548 break;
1549 case INDEX_op_nand_i32:
1550 tcg_out32 (s, NAND | SAB (args[1], args[0], args[2]));
1551 break;
1552 case INDEX_op_nor_i32:
1553 tcg_out32 (s, NOR | SAB (args[1], args[0], args[2]));
1554 break;
1556 case INDEX_op_mul_i32:
1557 if (const_args[2]) {
1558 if (args[2] == (int16_t) args[2])
1559 tcg_out32 (s, MULLI | RT (args[0]) | RA (args[1])
1560 | (args[2] & 0xffff));
1561 else {
1562 tcg_out_movi (s, TCG_TYPE_I32, 0, args[2]);
1563 tcg_out32 (s, MULLW | TAB (args[0], args[1], 0));
1566 else
1567 tcg_out32 (s, MULLW | TAB (args[0], args[1], args[2]));
1568 break;
1570 case INDEX_op_div_i32:
1571 tcg_out32 (s, DIVW | TAB (args[0], args[1], args[2]));
1572 break;
1574 case INDEX_op_divu_i32:
1575 tcg_out32 (s, DIVWU | TAB (args[0], args[1], args[2]));
1576 break;
1578 case INDEX_op_mulu2_i32:
1579 if (args[0] == args[2] || args[0] == args[3]) {
1580 tcg_out32 (s, MULLW | TAB (0, args[2], args[3]));
1581 tcg_out32 (s, MULHWU | TAB (args[1], args[2], args[3]));
1582 tcg_out_mov (s, TCG_TYPE_I32, args[0], 0);
1584 else {
1585 tcg_out32 (s, MULLW | TAB (args[0], args[2], args[3]));
1586 tcg_out32 (s, MULHWU | TAB (args[1], args[2], args[3]));
1588 break;
1590 case INDEX_op_shl_i32:
1591 if (const_args[2]) {
1592 tcg_out32 (s, (RLWINM
1593 | RA (args[0])
1594 | RS (args[1])
1595 | SH (args[2])
1596 | MB (0)
1597 | ME (31 - args[2])
1601 else
1602 tcg_out32 (s, SLW | SAB (args[1], args[0], args[2]));
1603 break;
1604 case INDEX_op_shr_i32:
1605 if (const_args[2]) {
1606 tcg_out32 (s, (RLWINM
1607 | RA (args[0])
1608 | RS (args[1])
1609 | SH (32 - args[2])
1610 | MB (args[2])
1611 | ME (31)
1615 else
1616 tcg_out32 (s, SRW | SAB (args[1], args[0], args[2]));
1617 break;
1618 case INDEX_op_sar_i32:
1619 if (const_args[2])
1620 tcg_out32 (s, SRAWI | RS (args[1]) | RA (args[0]) | SH (args[2]));
1621 else
1622 tcg_out32 (s, SRAW | SAB (args[1], args[0], args[2]));
1623 break;
1624 case INDEX_op_rotl_i32:
1626 int op = 0
1627 | RA (args[0])
1628 | RS (args[1])
1629 | MB (0)
1630 | ME (31)
1631 | (const_args[2] ? RLWINM | SH (args[2])
1632 : RLWNM | RB (args[2]))
1634 tcg_out32 (s, op);
1636 break;
1637 case INDEX_op_rotr_i32:
1638 if (const_args[2]) {
1639 if (!args[2]) {
1640 tcg_out_mov (s, TCG_TYPE_I32, args[0], args[1]);
1642 else {
1643 tcg_out32 (s, RLWINM
1644 | RA (args[0])
1645 | RS (args[1])
1646 | SH (32 - args[2])
1647 | MB (0)
1648 | ME (31)
1652 else {
1653 tcg_out32 (s, SUBFIC | RT (0) | RA (args[2]) | 32);
1654 tcg_out32 (s, RLWNM
1655 | RA (args[0])
1656 | RS (args[1])
1657 | RB (0)
1658 | MB (0)
1659 | ME (31)
1662 break;
1664 case INDEX_op_add2_i32:
1665 if (args[0] == args[3] || args[0] == args[5]) {
1666 tcg_out32 (s, ADDC | TAB (0, args[2], args[4]));
1667 tcg_out32 (s, ADDE | TAB (args[1], args[3], args[5]));
1668 tcg_out_mov (s, TCG_TYPE_I32, args[0], 0);
1670 else {
1671 tcg_out32 (s, ADDC | TAB (args[0], args[2], args[4]));
1672 tcg_out32 (s, ADDE | TAB (args[1], args[3], args[5]));
1674 break;
1675 case INDEX_op_sub2_i32:
1676 if (args[0] == args[3] || args[0] == args[5]) {
1677 tcg_out32 (s, SUBFC | TAB (0, args[4], args[2]));
1678 tcg_out32 (s, SUBFE | TAB (args[1], args[5], args[3]));
1679 tcg_out_mov (s, TCG_TYPE_I32, args[0], 0);
1681 else {
1682 tcg_out32 (s, SUBFC | TAB (args[0], args[4], args[2]));
1683 tcg_out32 (s, SUBFE | TAB (args[1], args[5], args[3]));
1685 break;
1687 case INDEX_op_brcond_i32:
1689 /* args[0] = r0
1690 args[1] = r1 (const_args[1]: r1 is const)
1691 args[2] = cond
1692 args[3] = label_index */
1695 tcg_out_brcond (s, args[2], args[0], args[1], const_args[1], args[3]);
1696 break;
1697 case INDEX_op_brcond2_i32:
1698 tcg_out_brcond2(s, args, const_args);
1699 break;
1701 case INDEX_op_neg_i32:
1702 tcg_out32 (s, NEG | RT (args[0]) | RA (args[1]));
1703 break;
1705 case INDEX_op_not_i32:
1706 tcg_out32 (s, NOR | SAB (args[1], args[0], args[1]));
1707 break;
1709 case INDEX_op_qemu_ld_i32:
1710 tcg_out_qemu_ld(s, args, 0);
1711 break;
1712 case INDEX_op_qemu_ld_i64:
1713 tcg_out_qemu_ld(s, args, 1);
1714 break;
1715 case INDEX_op_qemu_st_i32:
1716 tcg_out_qemu_st(s, args, 0);
1717 break;
1718 case INDEX_op_qemu_st_i64:
1719 tcg_out_qemu_st(s, args, 1);
1720 break;
1722 case INDEX_op_ext8s_i32:
1723 tcg_out32 (s, EXTSB | RS (args[1]) | RA (args[0]));
1724 break;
1725 case INDEX_op_ext8u_i32:
1726 tcg_out32 (s, RLWINM
1727 | RA (args[0])
1728 | RS (args[1])
1729 | SH (0)
1730 | MB (24)
1731 | ME (31)
1733 break;
1734 case INDEX_op_ext16s_i32:
1735 tcg_out32 (s, EXTSH | RS (args[1]) | RA (args[0]));
1736 break;
1737 case INDEX_op_ext16u_i32:
1738 tcg_out32 (s, RLWINM
1739 | RA (args[0])
1740 | RS (args[1])
1741 | SH (0)
1742 | MB (16)
1743 | ME (31)
1745 break;
1747 case INDEX_op_setcond_i32:
1748 tcg_out_setcond (s, args[3], args[0], args[1], args[2], const_args[2]);
1749 break;
1750 case INDEX_op_setcond2_i32:
1751 tcg_out_setcond2 (s, args, const_args);
1752 break;
1754 case INDEX_op_bswap16_i32:
1755 /* Stolen from gcc's builtin_bswap16 */
1757 /* a1 = abcd */
1759 /* r0 = (a1 << 8) & 0xff00 # 00d0 */
1760 tcg_out32 (s, RLWINM
1761 | RA (0)
1762 | RS (args[1])
1763 | SH (8)
1764 | MB (16)
1765 | ME (23)
1768 /* a0 = rotate_left (a1, 24) & 0xff # 000c */
1769 tcg_out32 (s, RLWINM
1770 | RA (args[0])
1771 | RS (args[1])
1772 | SH (24)
1773 | MB (24)
1774 | ME (31)
1777 /* a0 = a0 | r0 # 00dc */
1778 tcg_out32 (s, OR | SAB (0, args[0], args[0]));
1779 break;
1781 case INDEX_op_bswap32_i32:
1782 /* Stolen from gcc's builtin_bswap32 */
1784 int a0 = args[0];
1786 /* a1 = args[1] # abcd */
1788 if (a0 == args[1]) {
1789 a0 = 0;
1792 /* a0 = rotate_left (a1, 8) # bcda */
1793 tcg_out32 (s, RLWINM
1794 | RA (a0)
1795 | RS (args[1])
1796 | SH (8)
1797 | MB (0)
1798 | ME (31)
1801 /* a0 = (a0 & ~0xff000000) | ((a1 << 24) & 0xff000000) # dcda */
1802 tcg_out32 (s, RLWIMI
1803 | RA (a0)
1804 | RS (args[1])
1805 | SH (24)
1806 | MB (0)
1807 | ME (7)
1810 /* a0 = (a0 & ~0x0000ff00) | ((a1 << 24) & 0x0000ff00) # dcba */
1811 tcg_out32 (s, RLWIMI
1812 | RA (a0)
1813 | RS (args[1])
1814 | SH (24)
1815 | MB (16)
1816 | ME (23)
1819 if (!a0) {
1820 tcg_out_mov (s, TCG_TYPE_I32, args[0], a0);
1823 break;
1825 case INDEX_op_deposit_i32:
1826 tcg_out32 (s, RLWIMI
1827 | RA (args[0])
1828 | RS (args[2])
1829 | SH (args[3])
1830 | MB (32 - args[3] - args[4])
1831 | ME (31 - args[3])
1833 break;
1835 case INDEX_op_movcond_i32:
1836 tcg_out_movcond (s, args[5], args[0],
1837 args[1], args[2],
1838 args[3], args[4],
1839 const_args[2]);
1840 break;
1842 default:
1843 tcg_dump_ops (s);
1844 tcg_abort ();
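/* Operand constraints used in the table below: "r" is any GPR, "ri"
   additionally allows an immediate, "0" ties an input to output operand 0.
   L, K and M are the qemu_ld/st constraints parsed above; with
   CONFIG_SOFTMMU they exclude the registers needed to marshal arguments for
   the slow-path helper call. */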
1848 static const TCGTargetOpDef ppc_op_defs[] = {
1849 { INDEX_op_exit_tb, { } },
1850 { INDEX_op_goto_tb, { } },
1851 { INDEX_op_call, { "ri" } },
1852 { INDEX_op_br, { } },
1854 { INDEX_op_mov_i32, { "r", "r" } },
1855 { INDEX_op_movi_i32, { "r" } },
1856 { INDEX_op_ld8u_i32, { "r", "r" } },
1857 { INDEX_op_ld8s_i32, { "r", "r" } },
1858 { INDEX_op_ld16u_i32, { "r", "r" } },
1859 { INDEX_op_ld16s_i32, { "r", "r" } },
1860 { INDEX_op_ld_i32, { "r", "r" } },
1861 { INDEX_op_st8_i32, { "r", "r" } },
1862 { INDEX_op_st16_i32, { "r", "r" } },
1863 { INDEX_op_st_i32, { "r", "r" } },
1865 { INDEX_op_add_i32, { "r", "r", "ri" } },
1866 { INDEX_op_mul_i32, { "r", "r", "ri" } },
1867 { INDEX_op_div_i32, { "r", "r", "r" } },
1868 { INDEX_op_divu_i32, { "r", "r", "r" } },
1869 { INDEX_op_mulu2_i32, { "r", "r", "r", "r" } },
1870 { INDEX_op_sub_i32, { "r", "r", "ri" } },
1871 { INDEX_op_and_i32, { "r", "r", "ri" } },
1872 { INDEX_op_or_i32, { "r", "r", "ri" } },
1873 { INDEX_op_xor_i32, { "r", "r", "ri" } },
1875 { INDEX_op_shl_i32, { "r", "r", "ri" } },
1876 { INDEX_op_shr_i32, { "r", "r", "ri" } },
1877 { INDEX_op_sar_i32, { "r", "r", "ri" } },
1879 { INDEX_op_rotl_i32, { "r", "r", "ri" } },
1880 { INDEX_op_rotr_i32, { "r", "r", "ri" } },
1882 { INDEX_op_brcond_i32, { "r", "ri" } },
1884 { INDEX_op_add2_i32, { "r", "r", "r", "r", "r", "r" } },
1885 { INDEX_op_sub2_i32, { "r", "r", "r", "r", "r", "r" } },
1886 { INDEX_op_brcond2_i32, { "r", "r", "r", "r" } },
1888 { INDEX_op_neg_i32, { "r", "r" } },
1889 { INDEX_op_not_i32, { "r", "r" } },
1891 { INDEX_op_andc_i32, { "r", "r", "r" } },
1892 { INDEX_op_orc_i32, { "r", "r", "r" } },
1893 { INDEX_op_eqv_i32, { "r", "r", "r" } },
1894 { INDEX_op_nand_i32, { "r", "r", "r" } },
1895 { INDEX_op_nor_i32, { "r", "r", "r" } },
1897 { INDEX_op_setcond_i32, { "r", "r", "ri" } },
1898 { INDEX_op_setcond2_i32, { "r", "r", "r", "ri", "ri" } },
1900 { INDEX_op_bswap16_i32, { "r", "r" } },
1901 { INDEX_op_bswap32_i32, { "r", "r" } },
1903 #if TARGET_LONG_BITS == 32
1904 { INDEX_op_qemu_ld_i32, { "r", "L" } },
1905 { INDEX_op_qemu_ld_i64, { "L", "L", "L" } },
1906 { INDEX_op_qemu_st_i32, { "K", "K" } },
1907 { INDEX_op_qemu_st_i64, { "M", "M", "M" } },
1908 #else
1909 { INDEX_op_qemu_ld_i32, { "r", "L", "L" } },
1910 { INDEX_op_qemu_ld_i64, { "L", "L", "L", "L" } },
1911 { INDEX_op_qemu_st_i32, { "K", "K", "K" } },
1912 { INDEX_op_qemu_st_i64, { "M", "M", "M", "M" } },
1913 #endif
1915 { INDEX_op_ext8s_i32, { "r", "r" } },
1916 { INDEX_op_ext8u_i32, { "r", "r" } },
1917 { INDEX_op_ext16s_i32, { "r", "r" } },
1918 { INDEX_op_ext16u_i32, { "r", "r" } },
1920 { INDEX_op_deposit_i32, { "r", "0", "r" } },
1921 { INDEX_op_movcond_i32, { "r", "r", "ri", "r", "r" } },
1923 { -1 },
1926 static void tcg_target_init(TCGContext *s)
1928 tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
1929 tcg_regset_set32(tcg_target_call_clobber_regs, 0,
1930 (1 << TCG_REG_R0) |
1931 #ifdef TCG_TARGET_CALL_DARWIN
1932 (1 << TCG_REG_R2) |
1933 #endif
1934 (1 << TCG_REG_R3) |
1935 (1 << TCG_REG_R4) |
1936 (1 << TCG_REG_R5) |
1937 (1 << TCG_REG_R6) |
1938 (1 << TCG_REG_R7) |
1939 (1 << TCG_REG_R8) |
1940 (1 << TCG_REG_R9) |
1941 (1 << TCG_REG_R10) |
1942 (1 << TCG_REG_R11) |
1943 (1 << TCG_REG_R12)
1946 tcg_regset_clear(s->reserved_regs);
1947 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);
1948 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);
1949 #ifndef TCG_TARGET_CALL_DARWIN
1950 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2);
1951 #endif
1952 #ifdef _CALL_SYSV
1953 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13);
1954 #endif
1956 tcg_add_target_add_op_defs(ppc_op_defs);