ETRAX-FS: Correct ethernet PHY diagnostics register reads.
[qemu/qemu-JZ.git] / target-ppc / translate.c
blob275f7807c9c271120b37ab7b0d7392ab4aa25faa
1 /*
2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
26 #include "cpu.h"
27 #include "exec-all.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31 #include "qemu-common.h"
33 #define CPU_SINGLE_STEP 0x1
34 #define CPU_BRANCH_STEP 0x2
35 #define GDBSTUB_SINGLE_STEP 0x4
37 /* Include definitions for instructions classes and implementations flags */
38 //#define DO_SINGLE_STEP
39 //#define PPC_DEBUG_DISAS
40 //#define DEBUG_MEMORY_ACCESSES
41 //#define DO_PPC_STATISTICS
42 //#define OPTIMIZE_FPRF_UPDATE
44 /*****************************************************************************/
45 /* Code translation helpers */
47 /* global register indexes */
48 static TCGv cpu_env;
49 static char cpu_reg_names[10*3 + 22*4 /* GPR */
50 #if !defined(TARGET_PPC64)
51 + 10*4 + 22*5 /* SPE GPRh */
52 #endif
53 + 10*4 + 22*5 /* FPR */
54 + 2*(10*6 + 22*7) /* AVRh, AVRl */
55 + 8*5 /* CRF */];
56 static TCGv cpu_gpr[32];
57 #if !defined(TARGET_PPC64)
58 static TCGv cpu_gprh[32];
59 #endif
60 static TCGv cpu_fpr[32];
61 static TCGv cpu_avrh[32], cpu_avrl[32];
62 static TCGv cpu_crf[8];
63 static TCGv cpu_nip;
64 static TCGv cpu_ctr;
65 static TCGv cpu_lr;
67 /* dyngen register indexes */
68 static TCGv cpu_T[3];
69 #if defined(TARGET_PPC64)
70 #define cpu_T64 cpu_T
71 #else
72 static TCGv cpu_T64[3];
73 #endif
74 static TCGv cpu_FT[3];
75 static TCGv cpu_AVRh[3], cpu_AVRl[3];
77 #include "gen-icount.h"
79 void ppc_translate_init(void)
81 int i;
82 char* p;
83 static int done_init = 0;
85 if (done_init)
86 return;
88 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
89 #if TARGET_LONG_BITS > HOST_LONG_BITS
90 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
91 TCG_AREG0, offsetof(CPUState, t0), "T0");
92 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
93 TCG_AREG0, offsetof(CPUState, t1), "T1");
94 cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
95 TCG_AREG0, offsetof(CPUState, t2), "T2");
96 #else
97 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
98 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
99 cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
100 #endif
101 #if !defined(TARGET_PPC64)
102 cpu_T64[0] = tcg_global_mem_new(TCG_TYPE_I64,
103 TCG_AREG0, offsetof(CPUState, t0_64),
104 "T0_64");
105 cpu_T64[1] = tcg_global_mem_new(TCG_TYPE_I64,
106 TCG_AREG0, offsetof(CPUState, t1_64),
107 "T1_64");
108 cpu_T64[2] = tcg_global_mem_new(TCG_TYPE_I64,
109 TCG_AREG0, offsetof(CPUState, t2_64),
110 "T2_64");
111 #endif
113 cpu_FT[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
114 offsetof(CPUState, ft0), "FT0");
115 cpu_FT[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
116 offsetof(CPUState, ft1), "FT1");
117 cpu_FT[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
118 offsetof(CPUState, ft2), "FT2");
120 cpu_AVRh[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
121 offsetof(CPUState, avr0.u64[0]), "AVR0H");
122 cpu_AVRl[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
123 offsetof(CPUState, avr0.u64[1]), "AVR0L");
124 cpu_AVRh[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
125 offsetof(CPUState, avr1.u64[0]), "AVR1H");
126 cpu_AVRl[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
127 offsetof(CPUState, avr1.u64[1]), "AVR1L");
128 cpu_AVRh[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
129 offsetof(CPUState, avr2.u64[0]), "AVR2H");
130 cpu_AVRl[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
131 offsetof(CPUState, avr2.u64[1]), "AVR2L");
133 p = cpu_reg_names;
135 for (i = 0; i < 8; i++) {
136 sprintf(p, "crf%d", i);
137 cpu_crf[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
138 offsetof(CPUState, crf[i]), p);
139 p += 5;
142 for (i = 0; i < 32; i++) {
143 sprintf(p, "r%d", i);
144 cpu_gpr[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
145 offsetof(CPUState, gpr[i]), p);
146 p += (i < 10) ? 3 : 4;
147 #if !defined(TARGET_PPC64)
148 sprintf(p, "r%dH", i);
149 cpu_gprh[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
150 offsetof(CPUState, gprh[i]), p);
151 p += (i < 10) ? 4 : 5;
152 #endif
154 sprintf(p, "fp%d", i);
155 cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
156 offsetof(CPUState, fpr[i]), p);
157 p += (i < 10) ? 4 : 5;
159 sprintf(p, "avr%dH", i);
160 cpu_avrh[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
161 offsetof(CPUState, avr[i].u64[0]), p);
162 p += (i < 10) ? 6 : 7;
164 sprintf(p, "avr%dL", i);
165 cpu_avrl[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
166 offsetof(CPUState, avr[i].u64[1]), p);
167 p += (i < 10) ? 6 : 7;
170 cpu_nip = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
171 offsetof(CPUState, nip), "nip");
173 cpu_ctr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
174 offsetof(CPUState, ctr), "ctr");
176 cpu_lr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
177 offsetof(CPUState, lr), "lr");
179 /* register helpers */
180 #undef DEF_HELPER
181 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
182 #include "helper.h"
184 done_init = 1;
187 #if defined(OPTIMIZE_FPRF_UPDATE)
188 static uint16_t *gen_fprf_buf[OPC_BUF_SIZE];
189 static uint16_t **gen_fprf_ptr;
190 #endif
192 /* internal defines */
193 typedef struct DisasContext {
194 struct TranslationBlock *tb;
195 target_ulong nip;
196 uint32_t opcode;
197 uint32_t exception;
198 /* Routine used to access memory */
199 int mem_idx;
200 /* Translation flags */
201 #if !defined(CONFIG_USER_ONLY)
202 int supervisor;
203 #endif
204 #if defined(TARGET_PPC64)
205 int sf_mode;
206 #endif
207 int fpu_enabled;
208 int altivec_enabled;
209 int spe_enabled;
210 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
211 int singlestep_enabled;
212 int dcache_line_size;
213 } DisasContext;
215 struct opc_handler_t {
216 /* invalid bits */
217 uint32_t inval;
218 /* instruction type */
219 uint64_t type;
220 /* handler */
221 void (*handler)(DisasContext *ctx);
222 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
223 const char *oname;
224 #endif
225 #if defined(DO_PPC_STATISTICS)
226 uint64_t count;
227 #endif
230 static always_inline void gen_set_Rc0 (DisasContext *ctx)
232 #if defined(TARGET_PPC64)
233 if (ctx->sf_mode)
234 gen_op_cmpi_64(0);
235 else
236 #endif
237 gen_op_cmpi(0);
238 gen_op_set_Rc0();
241 static always_inline void gen_reset_fpstatus (void)
243 #ifdef CONFIG_SOFTFLOAT
244 gen_op_reset_fpstatus();
245 #endif
248 static always_inline void gen_compute_fprf (int set_fprf, int set_rc)
250 if (set_fprf != 0) {
251 /* This case might be optimized later */
252 #if defined(OPTIMIZE_FPRF_UPDATE)
253 *gen_fprf_ptr++ = gen_opc_ptr;
254 #endif
255 gen_op_compute_fprf(1);
256 if (unlikely(set_rc))
257 tcg_gen_andi_i32(cpu_crf[1], cpu_T[0], 0xf);
258 gen_op_float_check_status();
259 } else if (unlikely(set_rc)) {
260 /* We always need to compute fpcc */
261 gen_op_compute_fprf(0);
262 tcg_gen_andi_i32(cpu_crf[1], cpu_T[0], 0xf);
263 if (set_fprf)
264 gen_op_float_check_status();
268 static always_inline void gen_optimize_fprf (void)
270 #if defined(OPTIMIZE_FPRF_UPDATE)
271 uint16_t **ptr;
273 for (ptr = gen_fprf_buf; ptr != (gen_fprf_ptr - 1); ptr++)
274 *ptr = INDEX_op_nop1;
275 gen_fprf_ptr = gen_fprf_buf;
276 #endif
279 static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip)
281 #if defined(TARGET_PPC64)
282 if (ctx->sf_mode)
283 tcg_gen_movi_tl(cpu_nip, nip);
284 else
285 #endif
286 tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
289 #define GEN_EXCP(ctx, excp, error) \
290 do { \
291 if ((ctx)->exception == POWERPC_EXCP_NONE) { \
292 gen_update_nip(ctx, (ctx)->nip); \
294 gen_op_raise_exception_err((excp), (error)); \
295 ctx->exception = (excp); \
296 } while (0)
298 #define GEN_EXCP_INVAL(ctx) \
299 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
300 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_INVAL)
302 #define GEN_EXCP_PRIVOPC(ctx) \
303 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
304 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_OPC)
306 #define GEN_EXCP_PRIVREG(ctx) \
307 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
308 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_REG)
310 #define GEN_EXCP_NO_FP(ctx) \
311 GEN_EXCP(ctx, POWERPC_EXCP_FPU, 0)
313 #define GEN_EXCP_NO_AP(ctx) \
314 GEN_EXCP(ctx, POWERPC_EXCP_APU, 0)
316 #define GEN_EXCP_NO_VR(ctx) \
317 GEN_EXCP(ctx, POWERPC_EXCP_VPU, 0)
319 /* Stop translation */
320 static always_inline void GEN_STOP (DisasContext *ctx)
322 gen_update_nip(ctx, ctx->nip);
323 ctx->exception = POWERPC_EXCP_STOP;
326 /* No need to update nip here, as execution flow will change */
327 static always_inline void GEN_SYNC (DisasContext *ctx)
329 ctx->exception = POWERPC_EXCP_SYNC;
332 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
333 static void gen_##name (DisasContext *ctx); \
334 GEN_OPCODE(name, opc1, opc2, opc3, inval, type); \
335 static void gen_##name (DisasContext *ctx)
337 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
338 static void gen_##name (DisasContext *ctx); \
339 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type); \
340 static void gen_##name (DisasContext *ctx)
342 typedef struct opcode_t {
343 unsigned char opc1, opc2, opc3;
344 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
345 unsigned char pad[5];
346 #else
347 unsigned char pad[1];
348 #endif
349 opc_handler_t handler;
350 const char *oname;
351 } opcode_t;
353 /*****************************************************************************/
354 /*** Instruction decoding ***/
355 #define EXTRACT_HELPER(name, shift, nb) \
356 static always_inline uint32_t name (uint32_t opcode) \
358 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
361 #define EXTRACT_SHELPER(name, shift, nb) \
362 static always_inline int32_t name (uint32_t opcode) \
364 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
367 /* Opcode part 1 */
368 EXTRACT_HELPER(opc1, 26, 6);
369 /* Opcode part 2 */
370 EXTRACT_HELPER(opc2, 1, 5);
371 /* Opcode part 3 */
372 EXTRACT_HELPER(opc3, 6, 5);
373 /* Update Cr0 flags */
374 EXTRACT_HELPER(Rc, 0, 1);
375 /* Destination */
376 EXTRACT_HELPER(rD, 21, 5);
377 /* Source */
378 EXTRACT_HELPER(rS, 21, 5);
379 /* First operand */
380 EXTRACT_HELPER(rA, 16, 5);
381 /* Second operand */
382 EXTRACT_HELPER(rB, 11, 5);
383 /* Third operand */
384 EXTRACT_HELPER(rC, 6, 5);
385 /*** Get CRn ***/
386 EXTRACT_HELPER(crfD, 23, 3);
387 EXTRACT_HELPER(crfS, 18, 3);
388 EXTRACT_HELPER(crbD, 21, 5);
389 EXTRACT_HELPER(crbA, 16, 5);
390 EXTRACT_HELPER(crbB, 11, 5);
391 /* SPR / TBL */
392 EXTRACT_HELPER(_SPR, 11, 10);
393 static always_inline uint32_t SPR (uint32_t opcode)
395 uint32_t sprn = _SPR(opcode);
397 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
399 /*** Get constants ***/
400 EXTRACT_HELPER(IMM, 12, 8);
401 /* 16 bits signed immediate value */
402 EXTRACT_SHELPER(SIMM, 0, 16);
403 /* 16 bits unsigned immediate value */
404 EXTRACT_HELPER(UIMM, 0, 16);
405 /* Bit count */
406 EXTRACT_HELPER(NB, 11, 5);
407 /* Shift count */
408 EXTRACT_HELPER(SH, 11, 5);
409 /* Mask start */
410 EXTRACT_HELPER(MB, 6, 5);
411 /* Mask end */
412 EXTRACT_HELPER(ME, 1, 5);
413 /* Trap operand */
414 EXTRACT_HELPER(TO, 21, 5);
416 EXTRACT_HELPER(CRM, 12, 8);
417 EXTRACT_HELPER(FM, 17, 8);
418 EXTRACT_HELPER(SR, 16, 4);
419 EXTRACT_HELPER(FPIMM, 12, 4);
421 /*** Jump target decoding ***/
422 /* Displacement */
423 EXTRACT_SHELPER(d, 0, 16);
424 /* Immediate address */
425 static always_inline target_ulong LI (uint32_t opcode)
427 return (opcode >> 0) & 0x03FFFFFC;
430 static always_inline uint32_t BD (uint32_t opcode)
432 return (opcode >> 0) & 0xFFFC;
435 EXTRACT_HELPER(BO, 21, 5);
436 EXTRACT_HELPER(BI, 16, 5);
437 /* Absolute/relative address */
438 EXTRACT_HELPER(AA, 1, 1);
439 /* Link */
440 EXTRACT_HELPER(LK, 0, 1);
442 /* Create a mask between <start> and <end> bits */
443 static always_inline target_ulong MASK (uint32_t start, uint32_t end)
445 target_ulong ret;
447 #if defined(TARGET_PPC64)
448 if (likely(start == 0)) {
449 ret = UINT64_MAX << (63 - end);
450 } else if (likely(end == 63)) {
451 ret = UINT64_MAX >> start;
453 #else
454 if (likely(start == 0)) {
455 ret = UINT32_MAX << (31 - end);
456 } else if (likely(end == 31)) {
457 ret = UINT32_MAX >> start;
459 #endif
460 else {
461 ret = (((target_ulong)(-1ULL)) >> (start)) ^
462 (((target_ulong)(-1ULL) >> (end)) >> 1);
463 if (unlikely(start > end))
464 return ~ret;
467 return ret;
470 /*****************************************************************************/
471 /* PowerPC Instructions types definitions */
472 enum {
473 PPC_NONE = 0x0000000000000000ULL,
474 /* PowerPC base instructions set */
475 PPC_INSNS_BASE = 0x0000000000000001ULL,
476 /* integer operations instructions */
477 #define PPC_INTEGER PPC_INSNS_BASE
478 /* flow control instructions */
479 #define PPC_FLOW PPC_INSNS_BASE
480 /* virtual memory instructions */
481 #define PPC_MEM PPC_INSNS_BASE
482 /* ld/st with reservation instructions */
483 #define PPC_RES PPC_INSNS_BASE
484 /* spr/msr access instructions */
485 #define PPC_MISC PPC_INSNS_BASE
486 /* Deprecated instruction sets */
487 /* Original POWER instruction set */
488 PPC_POWER = 0x0000000000000002ULL,
489 /* POWER2 instruction set extension */
490 PPC_POWER2 = 0x0000000000000004ULL,
491 /* Power RTC support */
492 PPC_POWER_RTC = 0x0000000000000008ULL,
493 /* Power-to-PowerPC bridge (601) */
494 PPC_POWER_BR = 0x0000000000000010ULL,
495 /* 64 bits PowerPC instruction set */
496 PPC_64B = 0x0000000000000020ULL,
497 /* New 64 bits extensions (PowerPC 2.0x) */
498 PPC_64BX = 0x0000000000000040ULL,
499 /* 64 bits hypervisor extensions */
500 PPC_64H = 0x0000000000000080ULL,
501 /* New wait instruction (PowerPC 2.0x) */
502 PPC_WAIT = 0x0000000000000100ULL,
503 /* Time base mftb instruction */
504 PPC_MFTB = 0x0000000000000200ULL,
506 /* Fixed-point unit extensions */
507 /* PowerPC 602 specific */
508 PPC_602_SPEC = 0x0000000000000400ULL,
509 /* isel instruction */
510 PPC_ISEL = 0x0000000000000800ULL,
511 /* popcntb instruction */
512 PPC_POPCNTB = 0x0000000000001000ULL,
513 /* string load / store */
514 PPC_STRING = 0x0000000000002000ULL,
516 /* Floating-point unit extensions */
517 /* Optional floating point instructions */
518 PPC_FLOAT = 0x0000000000010000ULL,
519 /* New floating-point extensions (PowerPC 2.0x) */
520 PPC_FLOAT_EXT = 0x0000000000020000ULL,
521 PPC_FLOAT_FSQRT = 0x0000000000040000ULL,
522 PPC_FLOAT_FRES = 0x0000000000080000ULL,
523 PPC_FLOAT_FRSQRTE = 0x0000000000100000ULL,
524 PPC_FLOAT_FRSQRTES = 0x0000000000200000ULL,
525 PPC_FLOAT_FSEL = 0x0000000000400000ULL,
526 PPC_FLOAT_STFIWX = 0x0000000000800000ULL,
528 /* Vector/SIMD extensions */
529 /* Altivec support */
530 PPC_ALTIVEC = 0x0000000001000000ULL,
531 /* PowerPC 2.03 SPE extension */
532 PPC_SPE = 0x0000000002000000ULL,
533 /* PowerPC 2.03 SPE floating-point extension */
534 PPC_SPEFPU = 0x0000000004000000ULL,
536 /* Optional memory control instructions */
537 PPC_MEM_TLBIA = 0x0000000010000000ULL,
538 PPC_MEM_TLBIE = 0x0000000020000000ULL,
539 PPC_MEM_TLBSYNC = 0x0000000040000000ULL,
540 /* sync instruction */
541 PPC_MEM_SYNC = 0x0000000080000000ULL,
542 /* eieio instruction */
543 PPC_MEM_EIEIO = 0x0000000100000000ULL,
545 /* Cache control instructions */
546 PPC_CACHE = 0x0000000200000000ULL,
547 /* icbi instruction */
548 PPC_CACHE_ICBI = 0x0000000400000000ULL,
549 /* dcbz instruction with fixed cache line size */
550 PPC_CACHE_DCBZ = 0x0000000800000000ULL,
551 /* dcbz instruction with tunable cache line size */
552 PPC_CACHE_DCBZT = 0x0000001000000000ULL,
553 /* dcba instruction */
554 PPC_CACHE_DCBA = 0x0000002000000000ULL,
555 /* Freescale cache locking instructions */
556 PPC_CACHE_LOCK = 0x0000004000000000ULL,
558 /* MMU related extensions */
559 /* external control instructions */
560 PPC_EXTERN = 0x0000010000000000ULL,
561 /* segment register access instructions */
562 PPC_SEGMENT = 0x0000020000000000ULL,
563 /* PowerPC 6xx TLB management instructions */
564 PPC_6xx_TLB = 0x0000040000000000ULL,
565 /* PowerPC 74xx TLB management instructions */
566 PPC_74xx_TLB = 0x0000080000000000ULL,
567 /* PowerPC 40x TLB management instructions */
568 PPC_40x_TLB = 0x0000100000000000ULL,
569 /* segment register access instructions for PowerPC 64 "bridge" */
570 PPC_SEGMENT_64B = 0x0000200000000000ULL,
571 /* SLB management */
572 PPC_SLBI = 0x0000400000000000ULL,
574 /* Embedded PowerPC dedicated instructions */
575 PPC_WRTEE = 0x0001000000000000ULL,
576 /* PowerPC 40x exception model */
577 PPC_40x_EXCP = 0x0002000000000000ULL,
578 /* PowerPC 405 Mac instructions */
579 PPC_405_MAC = 0x0004000000000000ULL,
580 /* PowerPC 440 specific instructions */
581 PPC_440_SPEC = 0x0008000000000000ULL,
582 /* BookE (embedded) PowerPC specification */
583 PPC_BOOKE = 0x0010000000000000ULL,
584 /* mfapidi instruction */
585 PPC_MFAPIDI = 0x0020000000000000ULL,
586 /* tlbiva instruction */
587 PPC_TLBIVA = 0x0040000000000000ULL,
588 /* tlbivax instruction */
589 PPC_TLBIVAX = 0x0080000000000000ULL,
590 /* PowerPC 4xx dedicated instructions */
591 PPC_4xx_COMMON = 0x0100000000000000ULL,
592 /* PowerPC 40x ibct instructions */
593 PPC_40x_ICBT = 0x0200000000000000ULL,
594 /* rfmci is not implemented in all BookE PowerPC */
595 PPC_RFMCI = 0x0400000000000000ULL,
596 /* rfdi instruction */
597 PPC_RFDI = 0x0800000000000000ULL,
598 /* DCR accesses */
599 PPC_DCR = 0x1000000000000000ULL,
600 /* DCR extended accesse */
601 PPC_DCRX = 0x2000000000000000ULL,
602 /* user-mode DCR access, implemented in PowerPC 460 */
603 PPC_DCRUX = 0x4000000000000000ULL,
606 /*****************************************************************************/
607 /* PowerPC instructions table */
608 #if HOST_LONG_BITS == 64
609 #define OPC_ALIGN 8
610 #else
611 #define OPC_ALIGN 4
612 #endif
613 #if defined(__APPLE__)
614 #define OPCODES_SECTION \
615 __attribute__ ((section("__TEXT,__opcodes"), unused, aligned (OPC_ALIGN) ))
616 #else
617 #define OPCODES_SECTION \
618 __attribute__ ((section(".opcodes"), unused, aligned (OPC_ALIGN) ))
619 #endif
621 #if defined(DO_PPC_STATISTICS)
622 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
623 OPCODES_SECTION opcode_t opc_##name = { \
624 .opc1 = op1, \
625 .opc2 = op2, \
626 .opc3 = op3, \
627 .pad = { 0, }, \
628 .handler = { \
629 .inval = invl, \
630 .type = _typ, \
631 .handler = &gen_##name, \
632 .oname = stringify(name), \
633 }, \
634 .oname = stringify(name), \
636 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
637 OPCODES_SECTION opcode_t opc_##name = { \
638 .opc1 = op1, \
639 .opc2 = op2, \
640 .opc3 = op3, \
641 .pad = { 0, }, \
642 .handler = { \
643 .inval = invl, \
644 .type = _typ, \
645 .handler = &gen_##name, \
646 .oname = onam, \
647 }, \
648 .oname = onam, \
650 #else
651 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
652 OPCODES_SECTION opcode_t opc_##name = { \
653 .opc1 = op1, \
654 .opc2 = op2, \
655 .opc3 = op3, \
656 .pad = { 0, }, \
657 .handler = { \
658 .inval = invl, \
659 .type = _typ, \
660 .handler = &gen_##name, \
661 }, \
662 .oname = stringify(name), \
664 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
665 OPCODES_SECTION opcode_t opc_##name = { \
666 .opc1 = op1, \
667 .opc2 = op2, \
668 .opc3 = op3, \
669 .pad = { 0, }, \
670 .handler = { \
671 .inval = invl, \
672 .type = _typ, \
673 .handler = &gen_##name, \
674 }, \
675 .oname = onam, \
677 #endif
679 #define GEN_OPCODE_MARK(name) \
680 OPCODES_SECTION opcode_t opc_##name = { \
681 .opc1 = 0xFF, \
682 .opc2 = 0xFF, \
683 .opc3 = 0xFF, \
684 .pad = { 0, }, \
685 .handler = { \
686 .inval = 0x00000000, \
687 .type = 0x00, \
688 .handler = NULL, \
689 }, \
690 .oname = stringify(name), \
693 /* Start opcode list */
694 GEN_OPCODE_MARK(start);
696 /* Invalid instruction */
697 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE)
699 GEN_EXCP_INVAL(ctx);
702 static opc_handler_t invalid_handler = {
703 .inval = 0xFFFFFFFF,
704 .type = PPC_NONE,
705 .handler = gen_invalid,
708 /*** Integer arithmetic ***/
709 #define __GEN_INT_ARITH2(name, opc1, opc2, opc3, inval, type) \
710 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
712 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
713 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
714 gen_op_##name(); \
715 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
716 if (unlikely(Rc(ctx->opcode) != 0)) \
717 gen_set_Rc0(ctx); \
720 #define __GEN_INT_ARITH2_O(name, opc1, opc2, opc3, inval, type) \
721 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
723 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
724 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
725 gen_op_##name(); \
726 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
727 if (unlikely(Rc(ctx->opcode) != 0)) \
728 gen_set_Rc0(ctx); \
731 #define __GEN_INT_ARITH1(name, opc1, opc2, opc3, type) \
732 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
734 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
735 gen_op_##name(); \
736 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
737 if (unlikely(Rc(ctx->opcode) != 0)) \
738 gen_set_Rc0(ctx); \
740 #define __GEN_INT_ARITH1_O(name, opc1, opc2, opc3, type) \
741 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
743 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
744 gen_op_##name(); \
745 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
746 if (unlikely(Rc(ctx->opcode) != 0)) \
747 gen_set_Rc0(ctx); \
750 /* Two operands arithmetic functions */
751 #define GEN_INT_ARITH2(name, opc1, opc2, opc3, type) \
752 __GEN_INT_ARITH2(name, opc1, opc2, opc3, 0x00000000, type) \
753 __GEN_INT_ARITH2_O(name##o, opc1, opc2, opc3 | 0x10, 0x00000000, type)
755 /* Two operands arithmetic functions with no overflow allowed */
756 #define GEN_INT_ARITHN(name, opc1, opc2, opc3, type) \
757 __GEN_INT_ARITH2(name, opc1, opc2, opc3, 0x00000400, type)
759 /* One operand arithmetic functions */
760 #define GEN_INT_ARITH1(name, opc1, opc2, opc3, type) \
761 __GEN_INT_ARITH1(name, opc1, opc2, opc3, type) \
762 __GEN_INT_ARITH1_O(name##o, opc1, opc2, opc3 | 0x10, type)
764 #if defined(TARGET_PPC64)
765 #define __GEN_INT_ARITH2_64(name, opc1, opc2, opc3, inval, type) \
766 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
768 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
769 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
770 if (ctx->sf_mode) \
771 gen_op_##name##_64(); \
772 else \
773 gen_op_##name(); \
774 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
775 if (unlikely(Rc(ctx->opcode) != 0)) \
776 gen_set_Rc0(ctx); \
779 #define __GEN_INT_ARITH2_O_64(name, opc1, opc2, opc3, inval, type) \
780 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
782 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
783 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
784 if (ctx->sf_mode) \
785 gen_op_##name##_64(); \
786 else \
787 gen_op_##name(); \
788 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
789 if (unlikely(Rc(ctx->opcode) != 0)) \
790 gen_set_Rc0(ctx); \
793 #define __GEN_INT_ARITH1_64(name, opc1, opc2, opc3, type) \
794 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
796 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
797 if (ctx->sf_mode) \
798 gen_op_##name##_64(); \
799 else \
800 gen_op_##name(); \
801 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
802 if (unlikely(Rc(ctx->opcode) != 0)) \
803 gen_set_Rc0(ctx); \
805 #define __GEN_INT_ARITH1_O_64(name, opc1, opc2, opc3, type) \
806 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
808 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
809 if (ctx->sf_mode) \
810 gen_op_##name##_64(); \
811 else \
812 gen_op_##name(); \
813 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
814 if (unlikely(Rc(ctx->opcode) != 0)) \
815 gen_set_Rc0(ctx); \
818 /* Two operands arithmetic functions */
819 #define GEN_INT_ARITH2_64(name, opc1, opc2, opc3, type) \
820 __GEN_INT_ARITH2_64(name, opc1, opc2, opc3, 0x00000000, type) \
821 __GEN_INT_ARITH2_O_64(name##o, opc1, opc2, opc3 | 0x10, 0x00000000, type)
823 /* Two operands arithmetic functions with no overflow allowed */
824 #define GEN_INT_ARITHN_64(name, opc1, opc2, opc3, type) \
825 __GEN_INT_ARITH2_64(name, opc1, opc2, opc3, 0x00000400, type)
827 /* One operand arithmetic functions */
828 #define GEN_INT_ARITH1_64(name, opc1, opc2, opc3, type) \
829 __GEN_INT_ARITH1_64(name, opc1, opc2, opc3, type) \
830 __GEN_INT_ARITH1_O_64(name##o, opc1, opc2, opc3 | 0x10, type)
831 #else
832 #define GEN_INT_ARITH2_64 GEN_INT_ARITH2
833 #define GEN_INT_ARITHN_64 GEN_INT_ARITHN
834 #define GEN_INT_ARITH1_64 GEN_INT_ARITH1
835 #endif
837 /* add add. addo addo. */
838 static always_inline void gen_op_add (void)
840 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
842 static always_inline void gen_op_addo (void)
844 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
845 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
846 gen_op_check_addo();
848 #if defined(TARGET_PPC64)
849 #define gen_op_add_64 gen_op_add
850 static always_inline void gen_op_addo_64 (void)
852 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
853 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
854 gen_op_check_addo_64();
856 #endif
857 GEN_INT_ARITH2_64 (add, 0x1F, 0x0A, 0x08, PPC_INTEGER);
858 /* addc addc. addco addco. */
859 static always_inline void gen_op_addc (void)
861 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
862 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
863 gen_op_check_addc();
865 static always_inline void gen_op_addco (void)
867 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
868 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
869 gen_op_check_addc();
870 gen_op_check_addo();
872 #if defined(TARGET_PPC64)
873 static always_inline void gen_op_addc_64 (void)
875 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
876 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
877 gen_op_check_addc_64();
879 static always_inline void gen_op_addco_64 (void)
881 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
882 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
883 gen_op_check_addc_64();
884 gen_op_check_addo_64();
886 #endif
887 GEN_INT_ARITH2_64 (addc, 0x1F, 0x0A, 0x00, PPC_INTEGER);
888 /* adde adde. addeo addeo. */
889 static always_inline void gen_op_addeo (void)
891 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
892 gen_op_adde();
893 gen_op_check_addo();
895 #if defined(TARGET_PPC64)
896 static always_inline void gen_op_addeo_64 (void)
898 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
899 gen_op_adde_64();
900 gen_op_check_addo_64();
902 #endif
903 GEN_INT_ARITH2_64 (adde, 0x1F, 0x0A, 0x04, PPC_INTEGER);
904 /* addme addme. addmeo addmeo. */
905 static always_inline void gen_op_addme (void)
907 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
908 gen_op_add_me();
910 #if defined(TARGET_PPC64)
911 static always_inline void gen_op_addme_64 (void)
913 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
914 gen_op_add_me_64();
916 #endif
917 GEN_INT_ARITH1_64 (addme, 0x1F, 0x0A, 0x07, PPC_INTEGER);
918 /* addze addze. addzeo addzeo. */
919 static always_inline void gen_op_addze (void)
921 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
922 gen_op_add_ze();
923 gen_op_check_addc();
925 static always_inline void gen_op_addzeo (void)
927 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
928 gen_op_add_ze();
929 gen_op_check_addc();
930 gen_op_check_addo();
932 #if defined(TARGET_PPC64)
933 static always_inline void gen_op_addze_64 (void)
935 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
936 gen_op_add_ze();
937 gen_op_check_addc_64();
939 static always_inline void gen_op_addzeo_64 (void)
941 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
942 gen_op_add_ze();
943 gen_op_check_addc_64();
944 gen_op_check_addo_64();
946 #endif
947 GEN_INT_ARITH1_64 (addze, 0x1F, 0x0A, 0x06, PPC_INTEGER);
948 /* divw divw. divwo divwo. */
949 GEN_INT_ARITH2 (divw, 0x1F, 0x0B, 0x0F, PPC_INTEGER);
950 /* divwu divwu. divwuo divwuo. */
951 GEN_INT_ARITH2 (divwu, 0x1F, 0x0B, 0x0E, PPC_INTEGER);
952 /* mulhw mulhw. */
953 GEN_INT_ARITHN (mulhw, 0x1F, 0x0B, 0x02, PPC_INTEGER);
954 /* mulhwu mulhwu. */
955 GEN_INT_ARITHN (mulhwu, 0x1F, 0x0B, 0x00, PPC_INTEGER);
956 /* mullw mullw. mullwo mullwo. */
957 GEN_INT_ARITH2 (mullw, 0x1F, 0x0B, 0x07, PPC_INTEGER);
958 /* neg neg. nego nego. */
959 GEN_INT_ARITH1_64 (neg, 0x1F, 0x08, 0x03, PPC_INTEGER);
960 /* subf subf. subfo subfo. */
961 static always_inline void gen_op_subf (void)
963 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
965 static always_inline void gen_op_subfo (void)
967 tcg_gen_not_tl(cpu_T[2], cpu_T[0]);
968 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
969 gen_op_check_addo();
971 #if defined(TARGET_PPC64)
972 #define gen_op_subf_64 gen_op_subf
973 static always_inline void gen_op_subfo_64 (void)
975 tcg_gen_not_i64(cpu_T[2], cpu_T[0]);
976 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
977 gen_op_check_addo_64();
979 #endif
980 GEN_INT_ARITH2_64 (subf, 0x1F, 0x08, 0x01, PPC_INTEGER);
981 /* subfc subfc. subfco subfco. */
982 static always_inline void gen_op_subfc (void)
984 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
985 gen_op_check_subfc();
987 static always_inline void gen_op_subfco (void)
989 tcg_gen_not_tl(cpu_T[2], cpu_T[0]);
990 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
991 gen_op_check_subfc();
992 gen_op_check_addo();
994 #if defined(TARGET_PPC64)
995 static always_inline void gen_op_subfc_64 (void)
997 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
998 gen_op_check_subfc_64();
1000 static always_inline void gen_op_subfco_64 (void)
1002 tcg_gen_not_i64(cpu_T[2], cpu_T[0]);
1003 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
1004 gen_op_check_subfc_64();
1005 gen_op_check_addo_64();
1007 #endif
1008 GEN_INT_ARITH2_64 (subfc, 0x1F, 0x08, 0x00, PPC_INTEGER);
1009 /* subfe subfe. subfeo subfeo. */
1010 static always_inline void gen_op_subfeo (void)
1012 tcg_gen_not_tl(cpu_T[2], cpu_T[0]);
1013 gen_op_subfe();
1014 gen_op_check_addo();
1016 #if defined(TARGET_PPC64)
1017 #define gen_op_subfe_64 gen_op_subfe
1018 static always_inline void gen_op_subfeo_64 (void)
1020 tcg_gen_not_i64(cpu_T[2], cpu_T[0]);
1021 gen_op_subfe_64();
1022 gen_op_check_addo_64();
1024 #endif
1025 GEN_INT_ARITH2_64 (subfe, 0x1F, 0x08, 0x04, PPC_INTEGER);
1026 /* subfme subfme. subfmeo subfmeo. */
1027 GEN_INT_ARITH1_64 (subfme, 0x1F, 0x08, 0x07, PPC_INTEGER);
1028 /* subfze subfze. subfzeo subfzeo. */
1029 GEN_INT_ARITH1_64 (subfze, 0x1F, 0x08, 0x06, PPC_INTEGER);
1030 /* addi */
1031 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1033 target_long simm = SIMM(ctx->opcode);
1035 if (rA(ctx->opcode) == 0) {
1036 /* li case */
1037 tcg_gen_movi_tl(cpu_T[0], simm);
1038 } else {
1039 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1040 if (likely(simm != 0))
1041 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm);
1043 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1045 /* addic */
1046 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1048 target_long simm = SIMM(ctx->opcode);
1050 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1051 if (likely(simm != 0)) {
1052 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
1053 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm);
1054 #if defined(TARGET_PPC64)
1055 if (ctx->sf_mode)
1056 gen_op_check_addc_64();
1057 else
1058 #endif
1059 gen_op_check_addc();
1060 } else {
1061 gen_op_clear_xer_ca();
1063 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1065 /* addic. */
1066 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1068 target_long simm = SIMM(ctx->opcode);
1070 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1071 if (likely(simm != 0)) {
1072 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
1073 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm);
1074 #if defined(TARGET_PPC64)
1075 if (ctx->sf_mode)
1076 gen_op_check_addc_64();
1077 else
1078 #endif
1079 gen_op_check_addc();
1080 } else {
1081 gen_op_clear_xer_ca();
1083 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1084 gen_set_Rc0(ctx);
1086 /* addis */
1087 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1089 target_long simm = SIMM(ctx->opcode);
1091 if (rA(ctx->opcode) == 0) {
1092 /* lis case */
1093 tcg_gen_movi_tl(cpu_T[0], simm << 16);
1094 } else {
1095 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1096 if (likely(simm != 0))
1097 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm << 16);
1099 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1101 /* mulli */
1102 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1104 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1105 gen_op_mulli(SIMM(ctx->opcode));
1106 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1108 /* subfic */
1109 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1111 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1112 #if defined(TARGET_PPC64)
1113 if (ctx->sf_mode)
1114 gen_op_subfic_64(SIMM(ctx->opcode));
1115 else
1116 #endif
1117 gen_op_subfic(SIMM(ctx->opcode));
1118 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1121 #if defined(TARGET_PPC64)
1122 /* mulhd mulhd. */
1123 GEN_INT_ARITHN (mulhd, 0x1F, 0x09, 0x02, PPC_64B);
1124 /* mulhdu mulhdu. */
1125 GEN_INT_ARITHN (mulhdu, 0x1F, 0x09, 0x00, PPC_64B);
1126 /* mulld mulld. mulldo mulldo. */
1127 GEN_INT_ARITH2 (mulld, 0x1F, 0x09, 0x07, PPC_64B);
1128 /* divd divd. divdo divdo. */
1129 GEN_INT_ARITH2 (divd, 0x1F, 0x09, 0x0F, PPC_64B);
1130 /* divdu divdu. divduo divduo. */
1131 GEN_INT_ARITH2 (divdu, 0x1F, 0x09, 0x0E, PPC_64B);
1132 #endif
1134 /*** Integer comparison ***/
1135 #if defined(TARGET_PPC64)
1136 #define GEN_CMP(name, opc, type) \
1137 GEN_HANDLER(name, 0x1F, 0x00, opc, 0x00400000, type) \
1139 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
1140 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
1141 if (ctx->sf_mode && (ctx->opcode & 0x00200000)) \
1142 gen_op_##name##_64(); \
1143 else \
1144 gen_op_##name(); \
1145 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf); \
1147 #else
1148 #define GEN_CMP(name, opc, type) \
1149 GEN_HANDLER(name, 0x1F, 0x00, opc, 0x00400000, type) \
1151 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
1152 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
1153 gen_op_##name(); \
1154 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf); \
1156 #endif
1158 /* cmp */
1159 GEN_CMP(cmp, 0x00, PPC_INTEGER);
1160 /* cmpi */
1161 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
1163 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1164 #if defined(TARGET_PPC64)
1165 if (ctx->sf_mode && (ctx->opcode & 0x00200000))
1166 gen_op_cmpi_64(SIMM(ctx->opcode));
1167 else
1168 #endif
1169 gen_op_cmpi(SIMM(ctx->opcode));
1170 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
1172 /* cmpl */
1173 GEN_CMP(cmpl, 0x01, PPC_INTEGER);
1174 /* cmpli */
1175 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
1177 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1178 #if defined(TARGET_PPC64)
1179 if (ctx->sf_mode && (ctx->opcode & 0x00200000))
1180 gen_op_cmpli_64(UIMM(ctx->opcode));
1181 else
1182 #endif
1183 gen_op_cmpli(UIMM(ctx->opcode));
1184 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
1187 /* isel (PowerPC 2.03 specification) */
1188 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL)
1190 uint32_t bi = rC(ctx->opcode);
1191 uint32_t mask;
1193 if (rA(ctx->opcode) == 0) {
1194 tcg_gen_movi_tl(cpu_T[0], 0);
1195 } else {
1196 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1198 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
1199 mask = 1 << (3 - (bi & 0x03));
1200 tcg_gen_mov_i32(cpu_T[0], cpu_crf[bi >> 2]);
1201 gen_op_test_true(mask);
1202 gen_op_isel();
1203 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1206 /*** Integer logical ***/
1207 #define __GEN_LOGICAL2(name, opc2, opc3, type) \
1208 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000000, type) \
1210 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]); \
1211 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
1212 gen_op_##name(); \
1213 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
1214 if (unlikely(Rc(ctx->opcode) != 0)) \
1215 gen_set_Rc0(ctx); \
1217 #define GEN_LOGICAL2(name, opc, type) \
1218 __GEN_LOGICAL2(name, 0x1C, opc, type)
1220 #define GEN_LOGICAL1(name, opc, type) \
1221 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) \
1223 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]); \
1224 gen_op_##name(); \
1225 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
1226 if (unlikely(Rc(ctx->opcode) != 0)) \
1227 gen_set_Rc0(ctx); \
1230 /* and & and. */
1231 GEN_LOGICAL2(and, 0x00, PPC_INTEGER);
1232 /* andc & andc. */
1233 GEN_LOGICAL2(andc, 0x01, PPC_INTEGER);
1234 /* andi. */
1235 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1237 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1238 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], UIMM(ctx->opcode));
1239 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1240 gen_set_Rc0(ctx);
1242 /* andis. */
1243 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1245 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1246 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], UIMM(ctx->opcode) << 16);
1247 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1248 gen_set_Rc0(ctx);
1251 /* cntlzw */
1252 GEN_LOGICAL1(cntlzw, 0x00, PPC_INTEGER);
1253 /* eqv & eqv. */
1254 GEN_LOGICAL2(eqv, 0x08, PPC_INTEGER);
1255 /* extsb & extsb. */
1256 GEN_LOGICAL1(extsb, 0x1D, PPC_INTEGER);
1257 /* extsh & extsh. */
1258 GEN_LOGICAL1(extsh, 0x1C, PPC_INTEGER);
1259 /* nand & nand. */
1260 GEN_LOGICAL2(nand, 0x0E, PPC_INTEGER);
1261 /* nor & nor. */
1262 GEN_LOGICAL2(nor, 0x03, PPC_INTEGER);
1264 /* or & or. */
1265 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER)
1267 int rs, ra, rb;
1269 rs = rS(ctx->opcode);
1270 ra = rA(ctx->opcode);
1271 rb = rB(ctx->opcode);
1272 /* Optimisation for mr. ri case */
1273 if (rs != ra || rs != rb) {
1274 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rs]);
1275 if (rs != rb) {
1276 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rb]);
1277 gen_op_or();
1279 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
1280 if (unlikely(Rc(ctx->opcode) != 0))
1281 gen_set_Rc0(ctx);
1282 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1283 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rs]);
1284 gen_set_Rc0(ctx);
1285 #if defined(TARGET_PPC64)
1286 } else {
1287 switch (rs) {
1288 case 1:
1289 /* Set process priority to low */
1290 gen_op_store_pri(2);
1291 break;
1292 case 6:
1293 /* Set process priority to medium-low */
1294 gen_op_store_pri(3);
1295 break;
1296 case 2:
1297 /* Set process priority to normal */
1298 gen_op_store_pri(4);
1299 break;
1300 #if !defined(CONFIG_USER_ONLY)
1301 case 31:
1302 if (ctx->supervisor > 0) {
1303 /* Set process priority to very low */
1304 gen_op_store_pri(1);
1306 break;
1307 case 5:
1308 if (ctx->supervisor > 0) {
1309 /* Set process priority to medium-hight */
1310 gen_op_store_pri(5);
1312 break;
1313 case 3:
1314 if (ctx->supervisor > 0) {
1315 /* Set process priority to high */
1316 gen_op_store_pri(6);
1318 break;
1319 case 7:
1320 if (ctx->supervisor > 1) {
1321 /* Set process priority to very high */
1322 gen_op_store_pri(7);
1324 break;
1325 #endif
1326 default:
1327 /* nop */
1328 break;
1330 #endif
1334 /* orc & orc. */
1335 GEN_LOGICAL2(orc, 0x0C, PPC_INTEGER);
1336 /* xor & xor. */
1337 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER)
1339 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1340 /* Optimisation for "set to zero" case */
1341 if (rS(ctx->opcode) != rB(ctx->opcode)) {
1342 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
1343 gen_op_xor();
1344 } else {
1345 tcg_gen_movi_tl(cpu_T[0], 0);
1347 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1348 if (unlikely(Rc(ctx->opcode) != 0))
1349 gen_set_Rc0(ctx);
1351 /* ori */
1352 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1354 target_ulong uimm = UIMM(ctx->opcode);
1356 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1357 /* NOP */
1358 /* XXX: should handle special NOPs for POWER series */
1359 return;
1361 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1362 if (likely(uimm != 0))
1363 gen_op_ori(uimm);
1364 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1366 /* oris */
1367 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1369 target_ulong uimm = UIMM(ctx->opcode);
1371 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1372 /* NOP */
1373 return;
1375 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1376 if (likely(uimm != 0))
1377 gen_op_ori(uimm << 16);
1378 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1380 /* xori */
1381 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1383 target_ulong uimm = UIMM(ctx->opcode);
1385 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1386 /* NOP */
1387 return;
1389 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1390 if (likely(uimm != 0))
1391 gen_op_xori(uimm);
1392 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1395 /* xoris */
1396 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1398 target_ulong uimm = UIMM(ctx->opcode);
1400 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1401 /* NOP */
1402 return;
1404 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1405 if (likely(uimm != 0))
1406 gen_op_xori(uimm << 16);
1407 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1410 /* popcntb : PowerPC 2.03 specification */
1411 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB)
1413 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1414 #if defined(TARGET_PPC64)
1415 if (ctx->sf_mode)
1416 gen_op_popcntb_64();
1417 else
1418 #endif
1419 gen_op_popcntb();
1420 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1423 #if defined(TARGET_PPC64)
1424 /* extsw & extsw. */
1425 GEN_LOGICAL1(extsw, 0x1E, PPC_64B);
1426 /* cntlzd */
1427 GEN_LOGICAL1(cntlzd, 0x01, PPC_64B);
1428 #endif
1430 /*** Integer rotate ***/
1431 /* rlwimi & rlwimi. */
1432 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1434 target_ulong mask;
1435 uint32_t mb, me, sh;
1437 mb = MB(ctx->opcode);
1438 me = ME(ctx->opcode);
1439 sh = SH(ctx->opcode);
1440 if (likely(sh == 0)) {
1441 if (likely(mb == 0 && me == 31)) {
1442 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1443 goto do_store;
1444 } else if (likely(mb == 31 && me == 0)) {
1445 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1446 goto do_store;
1448 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1449 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1450 goto do_mask;
1452 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1453 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1454 gen_op_rotli32_T0(SH(ctx->opcode));
1455 do_mask:
1456 #if defined(TARGET_PPC64)
1457 mb += 32;
1458 me += 32;
1459 #endif
1460 mask = MASK(mb, me);
1461 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], mask);
1462 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], ~mask);
1463 gen_op_or();
1464 do_store:
1465 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1466 if (unlikely(Rc(ctx->opcode) != 0))
1467 gen_set_Rc0(ctx);
1469 /* rlwinm & rlwinm. */
1470 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1472 uint32_t mb, me, sh;
1474 sh = SH(ctx->opcode);
1475 mb = MB(ctx->opcode);
1476 me = ME(ctx->opcode);
1477 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1478 if (likely(sh == 0)) {
1479 goto do_mask;
1481 if (likely(mb == 0)) {
1482 if (likely(me == 31)) {
1483 gen_op_rotli32_T0(sh);
1484 goto do_store;
1485 } else if (likely(me == (31 - sh))) {
1486 gen_op_sli_T0(sh);
1487 goto do_store;
1489 } else if (likely(me == 31)) {
1490 if (likely(sh == (32 - mb))) {
1491 gen_op_srli_T0(mb);
1492 goto do_store;
1495 gen_op_rotli32_T0(sh);
1496 do_mask:
1497 #if defined(TARGET_PPC64)
1498 mb += 32;
1499 me += 32;
1500 #endif
1501 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], MASK(mb, me));
1502 do_store:
1503 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1504 if (unlikely(Rc(ctx->opcode) != 0))
1505 gen_set_Rc0(ctx);
1507 /* rlwnm & rlwnm. */
1508 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1510 uint32_t mb, me;
1512 mb = MB(ctx->opcode);
1513 me = ME(ctx->opcode);
1514 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1515 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
1516 gen_op_rotl32_T0_T1();
1517 if (unlikely(mb != 0 || me != 31)) {
1518 #if defined(TARGET_PPC64)
1519 mb += 32;
1520 me += 32;
1521 #endif
1522 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], MASK(mb, me));
1524 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1525 if (unlikely(Rc(ctx->opcode) != 0))
1526 gen_set_Rc0(ctx);
1529 #if defined(TARGET_PPC64)
1530 #define GEN_PPC64_R2(name, opc1, opc2) \
1531 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1533 gen_##name(ctx, 0); \
1535 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1536 PPC_64B) \
1538 gen_##name(ctx, 1); \
1540 #define GEN_PPC64_R4(name, opc1, opc2) \
1541 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1543 gen_##name(ctx, 0, 0); \
1545 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
1546 PPC_64B) \
1548 gen_##name(ctx, 0, 1); \
1550 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1551 PPC_64B) \
1553 gen_##name(ctx, 1, 0); \
1555 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
1556 PPC_64B) \
1558 gen_##name(ctx, 1, 1); \
1561 static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb,
1562 uint32_t me, uint32_t sh)
1564 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1565 if (likely(sh == 0)) {
1566 goto do_mask;
1568 if (likely(mb == 0)) {
1569 if (likely(me == 63)) {
1570 gen_op_rotli64_T0(sh);
1571 goto do_store;
1572 } else if (likely(me == (63 - sh))) {
1573 gen_op_sli_T0(sh);
1574 goto do_store;
1576 } else if (likely(me == 63)) {
1577 if (likely(sh == (64 - mb))) {
1578 gen_op_srli_T0_64(mb);
1579 goto do_store;
1582 gen_op_rotli64_T0(sh);
1583 do_mask:
1584 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], MASK(mb, me));
1585 do_store:
1586 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1587 if (unlikely(Rc(ctx->opcode) != 0))
1588 gen_set_Rc0(ctx);
1590 /* rldicl - rldicl. */
1591 static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
1593 uint32_t sh, mb;
1595 sh = SH(ctx->opcode) | (shn << 5);
1596 mb = MB(ctx->opcode) | (mbn << 5);
1597 gen_rldinm(ctx, mb, 63, sh);
1599 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1600 /* rldicr - rldicr. */
1601 static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn)
1603 uint32_t sh, me;
1605 sh = SH(ctx->opcode) | (shn << 5);
1606 me = MB(ctx->opcode) | (men << 5);
1607 gen_rldinm(ctx, 0, me, sh);
1609 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1610 /* rldic - rldic. */
1611 static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
1613 uint32_t sh, mb;
1615 sh = SH(ctx->opcode) | (shn << 5);
1616 mb = MB(ctx->opcode) | (mbn << 5);
1617 gen_rldinm(ctx, mb, 63 - sh, sh);
1619 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1621 static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb,
1622 uint32_t me)
1624 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1625 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
1626 gen_op_rotl64_T0_T1();
1627 if (unlikely(mb != 0 || me != 63)) {
1628 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], MASK(mb, me));
1630 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1631 if (unlikely(Rc(ctx->opcode) != 0))
1632 gen_set_Rc0(ctx);
1635 /* rldcl - rldcl. */
1636 static always_inline void gen_rldcl (DisasContext *ctx, int mbn)
1638 uint32_t mb;
1640 mb = MB(ctx->opcode) | (mbn << 5);
1641 gen_rldnm(ctx, mb, 63);
1643 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1644 /* rldcr - rldcr. */
1645 static always_inline void gen_rldcr (DisasContext *ctx, int men)
1647 uint32_t me;
1649 me = MB(ctx->opcode) | (men << 5);
1650 gen_rldnm(ctx, 0, me);
1652 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1653 /* rldimi - rldimi. */
1654 static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
1656 uint64_t mask;
1657 uint32_t sh, mb, me;
1659 sh = SH(ctx->opcode) | (shn << 5);
1660 mb = MB(ctx->opcode) | (mbn << 5);
1661 me = 63 - sh;
1662 if (likely(sh == 0)) {
1663 if (likely(mb == 0)) {
1664 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1665 goto do_store;
1667 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1668 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1669 goto do_mask;
1671 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1672 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1673 gen_op_rotli64_T0(sh);
1674 do_mask:
1675 mask = MASK(mb, me);
1676 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], mask);
1677 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], ~mask);
1678 gen_op_or();
1679 do_store:
1680 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1681 if (unlikely(Rc(ctx->opcode) != 0))
1682 gen_set_Rc0(ctx);
1684 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1685 #endif
1687 /*** Integer shift ***/
1688 /* slw & slw. */
1689 __GEN_LOGICAL2(slw, 0x18, 0x00, PPC_INTEGER);
1690 /* sraw & sraw. */
1691 __GEN_LOGICAL2(sraw, 0x18, 0x18, PPC_INTEGER);
1692 /* srawi & srawi. */
1693 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER)
1695 int mb, me;
1696 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1697 if (SH(ctx->opcode) != 0) {
1698 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
1699 mb = 32 - SH(ctx->opcode);
1700 me = 31;
1701 #if defined(TARGET_PPC64)
1702 mb += 32;
1703 me += 32;
1704 #endif
1705 gen_op_srawi(SH(ctx->opcode), MASK(mb, me));
1707 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1708 if (unlikely(Rc(ctx->opcode) != 0))
1709 gen_set_Rc0(ctx);
1711 /* srw & srw. */
1712 __GEN_LOGICAL2(srw, 0x18, 0x10, PPC_INTEGER);
1714 #if defined(TARGET_PPC64)
1715 /* sld & sld. */
1716 __GEN_LOGICAL2(sld, 0x1B, 0x00, PPC_64B);
1717 /* srad & srad. */
1718 __GEN_LOGICAL2(srad, 0x1A, 0x18, PPC_64B);
1719 /* sradi & sradi. */
1720 static always_inline void gen_sradi (DisasContext *ctx, int n)
1722 uint64_t mask;
1723 int sh, mb, me;
1725 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1726 sh = SH(ctx->opcode) + (n << 5);
1727 if (sh != 0) {
1728 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
1729 mb = 64 - SH(ctx->opcode);
1730 me = 63;
1731 mask = MASK(mb, me);
1732 gen_op_sradi(sh, mask >> 32, mask);
1734 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1735 if (unlikely(Rc(ctx->opcode) != 0))
1736 gen_set_Rc0(ctx);
1738 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B)
1740 gen_sradi(ctx, 0);
1742 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B)
1744 gen_sradi(ctx, 1);
1746 /* srd & srd. */
1747 __GEN_LOGICAL2(srd, 0x1B, 0x10, PPC_64B);
1748 #endif
1750 /*** Floating-Point arithmetic ***/
1751 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
1752 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type) \
1754 if (unlikely(!ctx->fpu_enabled)) { \
1755 GEN_EXCP_NO_FP(ctx); \
1756 return; \
1758 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]); \
1759 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rC(ctx->opcode)]); \
1760 tcg_gen_mov_i64(cpu_FT[2], cpu_fpr[rB(ctx->opcode)]); \
1761 gen_reset_fpstatus(); \
1762 gen_op_f##op(); \
1763 if (isfloat) { \
1764 gen_op_frsp(); \
1766 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1767 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1770 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
1771 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
1772 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
1774 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
1775 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
1777 if (unlikely(!ctx->fpu_enabled)) { \
1778 GEN_EXCP_NO_FP(ctx); \
1779 return; \
1781 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]); \
1782 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]); \
1783 gen_reset_fpstatus(); \
1784 gen_op_f##op(); \
1785 if (isfloat) { \
1786 gen_op_frsp(); \
1788 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1789 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1791 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
1792 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
1793 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
1795 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
1796 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
1798 if (unlikely(!ctx->fpu_enabled)) { \
1799 GEN_EXCP_NO_FP(ctx); \
1800 return; \
1802 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]); \
1803 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rC(ctx->opcode)]); \
1804 gen_reset_fpstatus(); \
1805 gen_op_f##op(); \
1806 if (isfloat) { \
1807 gen_op_frsp(); \
1809 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1810 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1812 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
1813 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
1814 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
1816 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
1817 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type) \
1819 if (unlikely(!ctx->fpu_enabled)) { \
1820 GEN_EXCP_NO_FP(ctx); \
1821 return; \
1823 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]); \
1824 gen_reset_fpstatus(); \
1825 gen_op_f##name(); \
1826 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1827 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1830 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
1831 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type) \
1833 if (unlikely(!ctx->fpu_enabled)) { \
1834 GEN_EXCP_NO_FP(ctx); \
1835 return; \
1837 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]); \
1838 gen_reset_fpstatus(); \
1839 gen_op_f##name(); \
1840 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1841 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1844 /* fadd - fadds */
1845 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
1846 /* fdiv - fdivs */
1847 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
1848 /* fmul - fmuls */
1849 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
1851 /* fre */
1852 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
1854 /* fres */
1855 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
1857 /* frsqrte */
1858 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
1860 /* frsqrtes */
1861 static always_inline void gen_op_frsqrtes (void)
1863 gen_op_frsqrte();
1864 gen_op_frsp();
1866 GEN_FLOAT_BS(rsqrtes, 0x3B, 0x1A, 1, PPC_FLOAT_FRSQRTES);
1868 /* fsel */
1869 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
1870 /* fsub - fsubs */
1871 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
1872 /* Optional: */
1873 /* fsqrt */
1874 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
1876 if (unlikely(!ctx->fpu_enabled)) {
1877 GEN_EXCP_NO_FP(ctx);
1878 return;
1880 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
1881 gen_reset_fpstatus();
1882 gen_op_fsqrt();
1883 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
1884 gen_compute_fprf(1, Rc(ctx->opcode) != 0);
1887 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
1889 if (unlikely(!ctx->fpu_enabled)) {
1890 GEN_EXCP_NO_FP(ctx);
1891 return;
1893 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
1894 gen_reset_fpstatus();
1895 gen_op_fsqrt();
1896 gen_op_frsp();
1897 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
1898 gen_compute_fprf(1, Rc(ctx->opcode) != 0);
1901 /*** Floating-Point multiply-and-add ***/
1902 /* fmadd - fmadds */
1903 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
1904 /* fmsub - fmsubs */
1905 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
1906 /* fnmadd - fnmadds */
1907 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
1908 /* fnmsub - fnmsubs */
1909 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
1911 /*** Floating-Point round & convert ***/
1912 /* fctiw */
1913 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
1914 /* fctiwz */
1915 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
1916 /* frsp */
1917 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
1918 #if defined(TARGET_PPC64)
1919 /* fcfid */
1920 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
1921 /* fctid */
1922 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
1923 /* fctidz */
1924 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
1925 #endif
1927 /* frin */
1928 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
1929 /* friz */
1930 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
1931 /* frip */
1932 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
1933 /* frim */
1934 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
1936 /*** Floating-Point compare ***/
1937 /* fcmpo */
1938 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT)
1940 if (unlikely(!ctx->fpu_enabled)) {
1941 GEN_EXCP_NO_FP(ctx);
1942 return;
1944 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
1945 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
1946 gen_reset_fpstatus();
1947 gen_op_fcmpo();
1948 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
1949 gen_op_float_check_status();
1952 /* fcmpu */
1953 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT)
1955 if (unlikely(!ctx->fpu_enabled)) {
1956 GEN_EXCP_NO_FP(ctx);
1957 return;
1959 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
1960 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
1961 gen_reset_fpstatus();
1962 gen_op_fcmpu();
1963 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
1964 gen_op_float_check_status();
1967 /*** Floating-point move ***/
1968 /* fabs */
1969 /* XXX: beware that fabs never checks for NaNs nor updates FPSCR */
1970 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
1972 /* fmr - fmr. */
1973 /* XXX: beware that fmr never checks for NaNs nor updates FPSCR */
1974 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT)
1976 if (unlikely(!ctx->fpu_enabled)) {
1977 GEN_EXCP_NO_FP(ctx);
1978 return;
1980 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
1981 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
1982 gen_compute_fprf(0, Rc(ctx->opcode) != 0);
1985 /* fnabs */
1986 /* XXX: beware that fnabs never checks for NaNs nor updates FPSCR */
1987 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
1988 /* fneg */
1989 /* XXX: beware that fneg never checks for NaNs nor updates FPSCR */
1990 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
1992 /*** Floating-Point status & ctrl register ***/
1993 /* mcrfs */
1994 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT)
1996 int bfa;
1998 if (unlikely(!ctx->fpu_enabled)) {
1999 GEN_EXCP_NO_FP(ctx);
2000 return;
2002 gen_optimize_fprf();
2003 bfa = 4 * (7 - crfS(ctx->opcode));
2004 gen_op_load_fpscr_T0(bfa);
2005 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
2006 gen_op_fpscr_resetbit(~(0xF << bfa));
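/* Worked example (illustration only): crfS = 0 gives bfa = 4 * (7 - 0) = 28, so
 * FPSCR bits 31:28 (FX, FEX, VX and OX) are copied into CR field crfD and the
 * same four FPSCR bits are then cleared by the ~(0xF << 28) reset mask.
 */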
2009 /* mffs */
2010 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT)
2012 if (unlikely(!ctx->fpu_enabled)) {
2013 GEN_EXCP_NO_FP(ctx);
2014 return;
2016 gen_optimize_fprf();
2017 gen_reset_fpstatus();
2018 gen_op_load_fpscr_FT0();
2019 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
2020 gen_compute_fprf(0, Rc(ctx->opcode) != 0);
2023 /* mtfsb0 */
2024 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT)
2026 uint8_t crb;
2028 if (unlikely(!ctx->fpu_enabled)) {
2029 GEN_EXCP_NO_FP(ctx);
2030 return;
2032 crb = 32 - (crbD(ctx->opcode) >> 2);
2033 gen_optimize_fprf();
2034 gen_reset_fpstatus();
2035 if (likely(crb != 30 && crb != 29))
2036 gen_op_fpscr_resetbit(~(1 << crb));
2037 if (unlikely(Rc(ctx->opcode) != 0)) {
2038 gen_op_load_fpcc();
2039 gen_op_set_Rc0();
2043 /* mtfsb1 */
2044 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT)
2046 uint8_t crb;
2048 if (unlikely(!ctx->fpu_enabled)) {
2049 GEN_EXCP_NO_FP(ctx);
2050 return;
2052 crb = 32 - (crbD(ctx->opcode) >> 2);
2053 gen_optimize_fprf();
2054 gen_reset_fpstatus();
2055 /* XXX: we pretend we can only do IEEE floating-point computations */
2056 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI))
2057 gen_op_fpscr_setbit(crb);
2058 if (unlikely(Rc(ctx->opcode) != 0)) {
2059 gen_op_load_fpcc();
2060 gen_op_set_Rc0();
2062 /* We can raise a deferred exception */
2063 gen_op_float_check_status();
2066 /* mtfsf */
2067 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x02010000, PPC_FLOAT)
2069 if (unlikely(!ctx->fpu_enabled)) {
2070 GEN_EXCP_NO_FP(ctx);
2071 return;
2073 gen_optimize_fprf();
2074 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
2075 gen_reset_fpstatus();
2076 gen_op_store_fpscr(FM(ctx->opcode));
2077 if (unlikely(Rc(ctx->opcode) != 0)) {
2078 gen_op_load_fpcc();
2079 gen_op_set_Rc0();
2082 /* We can raise a deferred exception */
2082 gen_op_float_check_status();
2085 /* mtfsfi */
2086 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
2088 int bf, sh;
2090 if (unlikely(!ctx->fpu_enabled)) {
2091 GEN_EXCP_NO_FP(ctx);
2092 return;
2094 bf = crbD(ctx->opcode) >> 2;
2095 sh = 7 - bf;
2096 gen_optimize_fprf();
2097 tcg_gen_movi_i64(cpu_FT[0], FPIMM(ctx->opcode) << (4 * sh));
2098 gen_reset_fpstatus();
2099 gen_op_store_fpscr(1 << sh);
2100 if (unlikely(Rc(ctx->opcode) != 0)) {
2101 gen_op_load_fpcc();
2102 gen_op_set_Rc0();
2104 /* We can raise a deferred exception */
2105 gen_op_float_check_status();
2108 /*** Addressing modes ***/
2109 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
2110 static always_inline void gen_addr_imm_index (DisasContext *ctx,
2111 target_long maskl)
2113 target_long simm = SIMM(ctx->opcode);
2115 simm &= ~maskl;
2116 if (rA(ctx->opcode) == 0) {
2117 tcg_gen_movi_tl(cpu_T[0], simm);
2118 } else {
2119 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
2120 if (likely(simm != 0))
2121 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm);
2123 #ifdef DEBUG_MEMORY_ACCESSES
2124 gen_op_print_mem_EA();
2125 #endif
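/* Illustration (hypothetical operands): for lwz r3,-8(r1) the code above leaves
 * EA = GPR[r1] + (-8) in T0, while rA == 0 means a literal base of 0 rather than
 * GPR[0].  The maskl argument lets DS-form instructions such as ld/std clear the
 * low displacement bits, e.g. gen_addr_imm_index(ctx, 0x03).
 */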
2128 static always_inline void gen_addr_reg_index (DisasContext *ctx)
2130 if (rA(ctx->opcode) == 0) {
2131 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
2132 } else {
2133 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
2134 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
2135 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2137 #ifdef DEBUG_MEMORY_ACCESSES
2138 gen_op_print_mem_EA();
2139 #endif
2142 static always_inline void gen_addr_register (DisasContext *ctx)
2144 if (rA(ctx->opcode) == 0) {
2145 tcg_gen_movi_tl(cpu_T[0], 0);
2146 } else {
2147 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
2149 #ifdef DEBUG_MEMORY_ACCESSES
2150 gen_op_print_mem_EA();
2151 #endif
2154 #if defined(TARGET_PPC64)
2155 #define _GEN_MEM_FUNCS(name, mode) \
2156 &gen_op_##name##_##mode, \
2157 &gen_op_##name##_le_##mode, \
2158 &gen_op_##name##_64_##mode, \
2159 &gen_op_##name##_le_64_##mode
2160 #else
2161 #define _GEN_MEM_FUNCS(name, mode) \
2162 &gen_op_##name##_##mode, \
2163 &gen_op_##name##_le_##mode
2164 #endif
2165 #if defined(CONFIG_USER_ONLY)
2166 #if defined(TARGET_PPC64)
2167 #define NB_MEM_FUNCS 4
2168 #else
2169 #define NB_MEM_FUNCS 2
2170 #endif
2171 #define GEN_MEM_FUNCS(name) \
2172 _GEN_MEM_FUNCS(name, raw)
2173 #else
2174 #if defined(TARGET_PPC64)
2175 #define NB_MEM_FUNCS 12
2176 #else
2177 #define NB_MEM_FUNCS 6
2178 #endif
2179 #define GEN_MEM_FUNCS(name) \
2180 _GEN_MEM_FUNCS(name, user), \
2181 _GEN_MEM_FUNCS(name, kernel), \
2182 _GEN_MEM_FUNCS(name, hypv)
2183 #endif
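/* Illustration: the dispatch tables built with GEN_MEM_FUNCS below hold one
 * generated access op per guest context and are indexed by ctx->mem_idx.  In
 * the softmmu build each privilege level (user, kernel, hypv) contributes a
 * big-endian and a little-endian entry, plus 64-bit variants on TARGET_PPC64,
 * which is where NB_MEM_FUNCS = 6 or 12 comes from; the user-only build only
 * needs the "raw" entries.
 */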
2185 /*** Integer load ***/
2186 #define op_ldst(name) (*gen_op_##name[ctx->mem_idx])()
2188 /* Byte access routines are endian-safe */
2188 #define gen_op_lbz_le_raw gen_op_lbz_raw
2189 #define gen_op_lbz_le_user gen_op_lbz_user
2190 #define gen_op_lbz_le_kernel gen_op_lbz_kernel
2191 #define gen_op_lbz_le_hypv gen_op_lbz_hypv
2192 #define gen_op_lbz_le_64_raw gen_op_lbz_64_raw
2193 #define gen_op_lbz_le_64_user gen_op_lbz_64_user
2194 #define gen_op_lbz_le_64_kernel gen_op_lbz_64_kernel
2195 #define gen_op_lbz_le_64_hypv gen_op_lbz_64_hypv
2196 #define gen_op_stb_le_raw gen_op_stb_raw
2197 #define gen_op_stb_le_user gen_op_stb_user
2198 #define gen_op_stb_le_kernel gen_op_stb_kernel
2199 #define gen_op_stb_le_hypv gen_op_stb_hypv
2200 #define gen_op_stb_le_64_raw gen_op_stb_64_raw
2201 #define gen_op_stb_le_64_user gen_op_stb_64_user
2202 #define gen_op_stb_le_64_kernel gen_op_stb_64_kernel
2203 #define gen_op_stb_le_64_hypv gen_op_stb_64_hypv
2204 #define OP_LD_TABLE(width) \
2205 static GenOpFunc *gen_op_l##width[NB_MEM_FUNCS] = { \
2206 GEN_MEM_FUNCS(l##width), \
2208 #define OP_ST_TABLE(width) \
2209 static GenOpFunc *gen_op_st##width[NB_MEM_FUNCS] = { \
2210 GEN_MEM_FUNCS(st##width), \
2213 #define GEN_LD(width, opc, type) \
2214 GEN_HANDLER(l##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2216 gen_addr_imm_index(ctx, 0); \
2217 op_ldst(l##width); \
2218 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); \
2221 #define GEN_LDU(width, opc, type) \
2222 GEN_HANDLER(l##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2224 if (unlikely(rA(ctx->opcode) == 0 || \
2225 rA(ctx->opcode) == rD(ctx->opcode))) { \
2226 GEN_EXCP_INVAL(ctx); \
2227 return; \
2229 if (type == PPC_64B) \
2230 gen_addr_imm_index(ctx, 0x03); \
2231 else \
2232 gen_addr_imm_index(ctx, 0); \
2233 op_ldst(l##width); \
2234 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); \
2235 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2238 #define GEN_LDUX(width, opc2, opc3, type) \
2239 GEN_HANDLER(l##width##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2241 if (unlikely(rA(ctx->opcode) == 0 || \
2242 rA(ctx->opcode) == rD(ctx->opcode))) { \
2243 GEN_EXCP_INVAL(ctx); \
2244 return; \
2246 gen_addr_reg_index(ctx); \
2247 op_ldst(l##width); \
2248 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); \
2249 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2252 #define GEN_LDX(width, opc2, opc3, type) \
2253 GEN_HANDLER(l##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2255 gen_addr_reg_index(ctx); \
2256 op_ldst(l##width); \
2257 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); \
2260 #define GEN_LDS(width, op, type) \
2261 OP_LD_TABLE(width); \
2262 GEN_LD(width, op | 0x20, type); \
2263 GEN_LDU(width, op | 0x21, type); \
2264 GEN_LDUX(width, 0x17, op | 0x01, type); \
2265 GEN_LDX(width, 0x17, op | 0x00, type)
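/* Illustration (not extra code): GEN_LDS(bz, 0x02, PPC_INTEGER) below expands to
 * the gen_op_lbz dispatch table plus four handlers, lbz, lbzu, lbzux and lbzx.
 * All four share the same op_ldst(lbz) access op and differ only in how the
 * effective address is formed and whether rA is updated afterwards.
 */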
2267 /* lbz lbzu lbzux lbzx */
2268 GEN_LDS(bz, 0x02, PPC_INTEGER);
2269 /* lha lhau lhaux lhax */
2270 GEN_LDS(ha, 0x0A, PPC_INTEGER);
2271 /* lhz lhzu lhzux lhzx */
2272 GEN_LDS(hz, 0x08, PPC_INTEGER);
2273 /* lwz lwzu lwzux lwzx */
2274 GEN_LDS(wz, 0x00, PPC_INTEGER);
2275 #if defined(TARGET_PPC64)
2276 OP_LD_TABLE(wa);
2277 OP_LD_TABLE(d);
2278 /* lwaux */
2279 GEN_LDUX(wa, 0x15, 0x0B, PPC_64B);
2280 /* lwax */
2281 GEN_LDX(wa, 0x15, 0x0A, PPC_64B);
2282 /* ldux */
2283 GEN_LDUX(d, 0x15, 0x01, PPC_64B);
2284 /* ldx */
2285 GEN_LDX(d, 0x15, 0x00, PPC_64B);
2286 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B)
2288 if (Rc(ctx->opcode)) {
2289 if (unlikely(rA(ctx->opcode) == 0 ||
2290 rA(ctx->opcode) == rD(ctx->opcode))) {
2291 GEN_EXCP_INVAL(ctx);
2292 return;
2295 gen_addr_imm_index(ctx, 0x03);
2296 if (ctx->opcode & 0x02) {
2297 /* lwa (lwau is undefined) */
2298 op_ldst(lwa);
2299 } else {
2300 /* ld - ldu */
2301 op_ldst(ld);
2303 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
2304 if (Rc(ctx->opcode))
2305 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
2307 /* lq */
2308 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX)
2310 #if defined(CONFIG_USER_ONLY)
2311 GEN_EXCP_PRIVOPC(ctx);
2312 #else
2313 int ra, rd;
2315 /* Restore CPU state */
2316 if (unlikely(ctx->supervisor == 0)) {
2317 GEN_EXCP_PRIVOPC(ctx);
2318 return;
2320 ra = rA(ctx->opcode);
2321 rd = rD(ctx->opcode);
2322 if (unlikely((rd & 1) || rd == ra)) {
2323 GEN_EXCP_INVAL(ctx);
2324 return;
2326 if (unlikely(ctx->mem_idx & 1)) {
2327 /* Little-endian mode is not handled */
2328 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2329 return;
2331 gen_addr_imm_index(ctx, 0x0F);
2332 op_ldst(ld);
2333 tcg_gen_mov_tl(cpu_gpr[rd], cpu_T[1]);
2334 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 8);
2335 op_ldst(ld);
2336 tcg_gen_mov_tl(cpu_gpr[rd + 1], cpu_T[1]);
2337 #endif
2339 #endif
2341 /*** Integer store ***/
2342 #define GEN_ST(width, opc, type) \
2343 GEN_HANDLER(st##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2345 gen_addr_imm_index(ctx, 0); \
2346 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); \
2347 op_ldst(st##width); \
2350 #define GEN_STU(width, opc, type) \
2351 GEN_HANDLER(st##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2353 if (unlikely(rA(ctx->opcode) == 0)) { \
2354 GEN_EXCP_INVAL(ctx); \
2355 return; \
2357 if (type == PPC_64B) \
2358 gen_addr_imm_index(ctx, 0x03); \
2359 else \
2360 gen_addr_imm_index(ctx, 0); \
2361 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); \
2362 op_ldst(st##width); \
2363 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2366 #define GEN_STUX(width, opc2, opc3, type) \
2367 GEN_HANDLER(st##width##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2369 if (unlikely(rA(ctx->opcode) == 0)) { \
2370 GEN_EXCP_INVAL(ctx); \
2371 return; \
2373 gen_addr_reg_index(ctx); \
2374 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); \
2375 op_ldst(st##width); \
2376 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2379 #define GEN_STX(width, opc2, opc3, type) \
2380 GEN_HANDLER(st##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2382 gen_addr_reg_index(ctx); \
2383 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); \
2384 op_ldst(st##width); \
2387 #define GEN_STS(width, op, type) \
2388 OP_ST_TABLE(width); \
2389 GEN_ST(width, op | 0x20, type); \
2390 GEN_STU(width, op | 0x21, type); \
2391 GEN_STUX(width, 0x17, op | 0x01, type); \
2392 GEN_STX(width, 0x17, op | 0x00, type)
2394 /* stb stbu stbux stbx */
2395 GEN_STS(b, 0x06, PPC_INTEGER);
2396 /* sth sthu sthux sthx */
2397 GEN_STS(h, 0x0C, PPC_INTEGER);
2398 /* stw stwu stwux stwx */
2399 GEN_STS(w, 0x04, PPC_INTEGER);
2400 #if defined(TARGET_PPC64)
2401 OP_ST_TABLE(d);
2402 GEN_STUX(d, 0x15, 0x05, PPC_64B);
2403 GEN_STX(d, 0x15, 0x04, PPC_64B);
2404 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B)
2406 int rs;
2408 rs = rS(ctx->opcode);
2409 if ((ctx->opcode & 0x3) == 0x2) {
2410 #if defined(CONFIG_USER_ONLY)
2411 GEN_EXCP_PRIVOPC(ctx);
2412 #else
2413 /* stq */
2414 if (unlikely(ctx->supervisor == 0)) {
2415 GEN_EXCP_PRIVOPC(ctx);
2416 return;
2418 if (unlikely(rs & 1)) {
2419 GEN_EXCP_INVAL(ctx);
2420 return;
2422 if (unlikely(ctx->mem_idx & 1)) {
2423 /* Little-endian mode is not handled */
2424 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2425 return;
2427 gen_addr_imm_index(ctx, 0x03);
2428 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rs]);
2429 op_ldst(std);
2430 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 8);
2431 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rs + 1]);
2432 op_ldst(std);
2433 #endif
2434 } else {
2435 /* std / stdu */
2436 if (Rc(ctx->opcode)) {
2437 if (unlikely(rA(ctx->opcode) == 0)) {
2438 GEN_EXCP_INVAL(ctx);
2439 return;
2442 gen_addr_imm_index(ctx, 0x03);
2443 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rs]);
2444 op_ldst(std);
2445 if (Rc(ctx->opcode))
2446 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
2449 #endif
2450 /*** Integer load and store with byte reverse ***/
2451 /* lhbrx */
2452 OP_LD_TABLE(hbr);
2453 GEN_LDX(hbr, 0x16, 0x18, PPC_INTEGER);
2454 /* lwbrx */
2455 OP_LD_TABLE(wbr);
2456 GEN_LDX(wbr, 0x16, 0x10, PPC_INTEGER);
2457 /* sthbrx */
2458 OP_ST_TABLE(hbr);
2459 GEN_STX(hbr, 0x16, 0x1C, PPC_INTEGER);
2460 /* stwbrx */
2461 OP_ST_TABLE(wbr);
2462 GEN_STX(wbr, 0x16, 0x14, PPC_INTEGER);
2464 /*** Integer load and store multiple ***/
2465 #define op_ldstm(name, reg) (*gen_op_##name[ctx->mem_idx])(reg)
2466 static GenOpFunc1 *gen_op_lmw[NB_MEM_FUNCS] = {
2467 GEN_MEM_FUNCS(lmw),
2469 static GenOpFunc1 *gen_op_stmw[NB_MEM_FUNCS] = {
2470 GEN_MEM_FUNCS(stmw),
2473 /* lmw */
2474 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
2476 /* NIP cannot be restored if the memory exception comes from a helper */
2477 gen_update_nip(ctx, ctx->nip - 4);
2478 gen_addr_imm_index(ctx, 0);
2479 op_ldstm(lmw, rD(ctx->opcode));
2482 /* stmw */
2483 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
2485 /* NIP cannot be restored if the memory exception comes from a helper */
2486 gen_update_nip(ctx, ctx->nip - 4);
2487 gen_addr_imm_index(ctx, 0);
2488 op_ldstm(stmw, rS(ctx->opcode));
2491 /*** Integer load and store strings ***/
2492 #define op_ldsts(name, start) (*gen_op_##name[ctx->mem_idx])(start)
2493 #define op_ldstsx(name, rd, ra, rb) (*gen_op_##name[ctx->mem_idx])(rd, ra, rb)
2494 /* string loads & stores are by definition endian-safe */
2495 #define gen_op_lswi_le_raw gen_op_lswi_raw
2496 #define gen_op_lswi_le_user gen_op_lswi_user
2497 #define gen_op_lswi_le_kernel gen_op_lswi_kernel
2498 #define gen_op_lswi_le_hypv gen_op_lswi_hypv
2499 #define gen_op_lswi_le_64_raw gen_op_lswi_raw
2500 #define gen_op_lswi_le_64_user gen_op_lswi_user
2501 #define gen_op_lswi_le_64_kernel gen_op_lswi_kernel
2502 #define gen_op_lswi_le_64_hypv gen_op_lswi_hypv
2503 static GenOpFunc1 *gen_op_lswi[NB_MEM_FUNCS] = {
2504 GEN_MEM_FUNCS(lswi),
2506 #define gen_op_lswx_le_raw gen_op_lswx_raw
2507 #define gen_op_lswx_le_user gen_op_lswx_user
2508 #define gen_op_lswx_le_kernel gen_op_lswx_kernel
2509 #define gen_op_lswx_le_hypv gen_op_lswx_hypv
2510 #define gen_op_lswx_le_64_raw gen_op_lswx_raw
2511 #define gen_op_lswx_le_64_user gen_op_lswx_user
2512 #define gen_op_lswx_le_64_kernel gen_op_lswx_kernel
2513 #define gen_op_lswx_le_64_hypv gen_op_lswx_hypv
2514 static GenOpFunc3 *gen_op_lswx[NB_MEM_FUNCS] = {
2515 GEN_MEM_FUNCS(lswx),
2517 #define gen_op_stsw_le_raw gen_op_stsw_raw
2518 #define gen_op_stsw_le_user gen_op_stsw_user
2519 #define gen_op_stsw_le_kernel gen_op_stsw_kernel
2520 #define gen_op_stsw_le_hypv gen_op_stsw_hypv
2521 #define gen_op_stsw_le_64_raw gen_op_stsw_raw
2522 #define gen_op_stsw_le_64_user gen_op_stsw_user
2523 #define gen_op_stsw_le_64_kernel gen_op_stsw_kernel
2524 #define gen_op_stsw_le_64_hypv gen_op_stsw_hypv
2525 static GenOpFunc1 *gen_op_stsw[NB_MEM_FUNCS] = {
2526 GEN_MEM_FUNCS(stsw),
2529 /* lswi */
2530 /* PowerPC32 specification says we must generate an exception if
2531 * rA is in the range of registers to be loaded.
2532 * On the other hand, IBM says this is valid, but rA won't be loaded.
2533 * For now, I'll follow the spec...
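/* Worked example (illustration only): lswi with rD = r5 and NB = 12 gives
 * nr = 3 and loads r5, r6 and r7; an rA of r6 then satisfies the second clause
 * of the check below and raises the program exception, while the first clause
 * is there to catch a register range that wraps past r31 back to r0.
 */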
2535 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING)
2537 int nb = NB(ctx->opcode);
2538 int start = rD(ctx->opcode);
2539 int ra = rA(ctx->opcode);
2540 int nr;
2542 if (nb == 0)
2543 nb = 32;
2544 nr = nb / 4;
2545 if (unlikely(((start + nr) > 32 &&
2546 (start <= ra || (start + nr - 32) > ra)) ||
2547 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
2548 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
2549 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_LSWX);
2550 return;
2552 /* NIP cannot be restored if the memory exception comes from a helper */
2553 gen_update_nip(ctx, ctx->nip - 4);
2554 gen_addr_register(ctx);
2555 tcg_gen_movi_tl(cpu_T[1], nb);
2556 op_ldsts(lswi, start);
2559 /* lswx */
2560 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING)
2562 int ra = rA(ctx->opcode);
2563 int rb = rB(ctx->opcode);
2565 /* NIP cannot be restored if the memory exception comes from a helper */
2566 gen_update_nip(ctx, ctx->nip - 4);
2567 gen_addr_reg_index(ctx);
2568 if (ra == 0) {
2569 ra = rb;
2571 gen_op_load_xer_bc();
2572 op_ldstsx(lswx, rD(ctx->opcode), ra, rb);
2575 /* stswi */
2576 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING)
2578 int nb = NB(ctx->opcode);
2580 /* NIP cannot be restored if the memory exception comes from a helper */
2581 gen_update_nip(ctx, ctx->nip - 4);
2582 gen_addr_register(ctx);
2583 if (nb == 0)
2584 nb = 32;
2585 tcg_gen_movi_tl(cpu_T[1], nb);
2586 op_ldsts(stsw, rS(ctx->opcode));
2589 /* stswx */
2590 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING)
2592 /* NIP cannot be restored if the memory exception comes from a helper */
2593 gen_update_nip(ctx, ctx->nip - 4);
2594 gen_addr_reg_index(ctx);
2595 gen_op_load_xer_bc();
2596 op_ldsts(stsw, rS(ctx->opcode));
2599 /*** Memory synchronisation ***/
2600 /* eieio */
2601 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO)
2605 /* isync */
2606 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM)
2608 GEN_STOP(ctx);
2611 #define op_lwarx() (*gen_op_lwarx[ctx->mem_idx])()
2612 #define op_stwcx() (*gen_op_stwcx[ctx->mem_idx])()
2613 static GenOpFunc *gen_op_lwarx[NB_MEM_FUNCS] = {
2614 GEN_MEM_FUNCS(lwarx),
2616 static GenOpFunc *gen_op_stwcx[NB_MEM_FUNCS] = {
2617 GEN_MEM_FUNCS(stwcx),
2620 /* lwarx */
2621 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES)
2623 /* NIP cannot be restored if the memory exception comes from a helper */
2624 gen_update_nip(ctx, ctx->nip - 4);
2625 gen_addr_reg_index(ctx);
2626 op_lwarx();
2627 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
2630 /* stwcx. */
2631 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES)
2633 /* NIP cannot be restored if the memory exception comes from a helper */
2634 gen_update_nip(ctx, ctx->nip - 4);
2635 gen_addr_reg_index(ctx);
2636 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
2637 op_stwcx();
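/* Illustration (architectural behaviour, no new generated code): lwarx loads
 * the word at EA and establishes a reservation on it; the matching stwcx. only
 * performs its store if that reservation is still held and records the outcome
 * in CR0, which is how atomic read-modify-write loops are built on PowerPC.
 */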
2640 #if defined(TARGET_PPC64)
2641 #define op_ldarx() (*gen_op_ldarx[ctx->mem_idx])()
2642 #define op_stdcx() (*gen_op_stdcx[ctx->mem_idx])()
2643 static GenOpFunc *gen_op_ldarx[NB_MEM_FUNCS] = {
2644 GEN_MEM_FUNCS(ldarx),
2646 static GenOpFunc *gen_op_stdcx[NB_MEM_FUNCS] = {
2647 GEN_MEM_FUNCS(stdcx),
2650 /* ldarx */
2651 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B)
2653 /* NIP cannot be restored if the memory exception comes from a helper */
2654 gen_update_nip(ctx, ctx->nip - 4);
2655 gen_addr_reg_index(ctx);
2656 op_ldarx();
2657 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
2660 /* stdcx. */
2661 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B)
2663 /* NIP cannot be restored if the memory exception comes from a helper */
2664 gen_update_nip(ctx, ctx->nip - 4);
2665 gen_addr_reg_index(ctx);
2666 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
2667 op_stdcx();
2669 #endif /* defined(TARGET_PPC64) */
2671 /* sync */
2672 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC)
2676 /* wait */
2677 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT)
2679 /* Stop translation, as the CPU is supposed to sleep from now on */
2680 gen_op_wait();
2681 GEN_EXCP(ctx, EXCP_HLT, 1);
2684 /*** Floating-point load ***/
2685 #define GEN_LDF(width, opc, type) \
2686 GEN_HANDLER(l##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2688 if (unlikely(!ctx->fpu_enabled)) { \
2689 GEN_EXCP_NO_FP(ctx); \
2690 return; \
2692 gen_addr_imm_index(ctx, 0); \
2693 op_ldst(l##width); \
2694 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2697 #define GEN_LDUF(width, opc, type) \
2698 GEN_HANDLER(l##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2700 if (unlikely(!ctx->fpu_enabled)) { \
2701 GEN_EXCP_NO_FP(ctx); \
2702 return; \
2704 if (unlikely(rA(ctx->opcode) == 0)) { \
2705 GEN_EXCP_INVAL(ctx); \
2706 return; \
2708 gen_addr_imm_index(ctx, 0); \
2709 op_ldst(l##width); \
2710 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2711 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2714 #define GEN_LDUXF(width, opc, type) \
2715 GEN_HANDLER(l##width##ux, 0x1F, 0x17, opc, 0x00000001, type) \
2717 if (unlikely(!ctx->fpu_enabled)) { \
2718 GEN_EXCP_NO_FP(ctx); \
2719 return; \
2721 if (unlikely(rA(ctx->opcode) == 0)) { \
2722 GEN_EXCP_INVAL(ctx); \
2723 return; \
2725 gen_addr_reg_index(ctx); \
2726 op_ldst(l##width); \
2727 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2728 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2731 #define GEN_LDXF(width, opc2, opc3, type) \
2732 GEN_HANDLER(l##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2734 if (unlikely(!ctx->fpu_enabled)) { \
2735 GEN_EXCP_NO_FP(ctx); \
2736 return; \
2738 gen_addr_reg_index(ctx); \
2739 op_ldst(l##width); \
2740 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2743 #define GEN_LDFS(width, op, type) \
2744 OP_LD_TABLE(width); \
2745 GEN_LDF(width, op | 0x20, type); \
2746 GEN_LDUF(width, op | 0x21, type); \
2747 GEN_LDUXF(width, op | 0x01, type); \
2748 GEN_LDXF(width, 0x17, op | 0x00, type)
2750 /* lfd lfdu lfdux lfdx */
2751 GEN_LDFS(fd, 0x12, PPC_FLOAT);
2752 /* lfs lfsu lfsux lfsx */
2753 GEN_LDFS(fs, 0x10, PPC_FLOAT);
2755 /*** Floating-point store ***/
2756 #define GEN_STF(width, opc, type) \
2757 GEN_HANDLER(st##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2759 if (unlikely(!ctx->fpu_enabled)) { \
2760 GEN_EXCP_NO_FP(ctx); \
2761 return; \
2763 gen_addr_imm_index(ctx, 0); \
2764 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
2765 op_ldst(st##width); \
2768 #define GEN_STUF(width, opc, type) \
2769 GEN_HANDLER(st##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2771 if (unlikely(!ctx->fpu_enabled)) { \
2772 GEN_EXCP_NO_FP(ctx); \
2773 return; \
2775 if (unlikely(rA(ctx->opcode) == 0)) { \
2776 GEN_EXCP_INVAL(ctx); \
2777 return; \
2779 gen_addr_imm_index(ctx, 0); \
2780 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
2781 op_ldst(st##width); \
2782 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2785 #define GEN_STUXF(width, opc, type) \
2786 GEN_HANDLER(st##width##ux, 0x1F, 0x17, opc, 0x00000001, type) \
2788 if (unlikely(!ctx->fpu_enabled)) { \
2789 GEN_EXCP_NO_FP(ctx); \
2790 return; \
2792 if (unlikely(rA(ctx->opcode) == 0)) { \
2793 GEN_EXCP_INVAL(ctx); \
2794 return; \
2796 gen_addr_reg_index(ctx); \
2797 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
2798 op_ldst(st##width); \
2799 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2802 #define GEN_STXF(width, opc2, opc3, type) \
2803 GEN_HANDLER(st##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2805 if (unlikely(!ctx->fpu_enabled)) { \
2806 GEN_EXCP_NO_FP(ctx); \
2807 return; \
2809 gen_addr_reg_index(ctx); \
2810 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
2811 op_ldst(st##width); \
2814 #define GEN_STFS(width, op, type) \
2815 OP_ST_TABLE(width); \
2816 GEN_STF(width, op | 0x20, type); \
2817 GEN_STUF(width, op | 0x21, type); \
2818 GEN_STUXF(width, op | 0x01, type); \
2819 GEN_STXF(width, 0x17, op | 0x00, type)
2821 /* stfd stfdu stfdux stfdx */
2822 GEN_STFS(fd, 0x16, PPC_FLOAT);
2823 /* stfs stfsu stfsux stfsx */
2824 GEN_STFS(fs, 0x14, PPC_FLOAT);
2826 /* Optional: */
2827 /* stfiwx */
2828 OP_ST_TABLE(fiw);
2829 GEN_STXF(fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
2831 /*** Branch ***/
2832 static always_inline void gen_goto_tb (DisasContext *ctx, int n,
2833 target_ulong dest)
2835 TranslationBlock *tb;
2836 tb = ctx->tb;
2837 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
2838 likely(!ctx->singlestep_enabled)) {
2839 tcg_gen_goto_tb(n);
2840 tcg_gen_movi_tl(cpu_T[1], dest);
2841 #if defined(TARGET_PPC64)
2842 if (ctx->sf_mode)
2843 tcg_gen_andi_tl(cpu_nip, cpu_T[1], ~3);
2844 else
2845 #endif
2846 tcg_gen_andi_tl(cpu_nip, cpu_T[1], (uint32_t)~3);
2847 tcg_gen_exit_tb((long)tb + n);
2848 } else {
2849 tcg_gen_movi_tl(cpu_T[1], dest);
2850 #if defined(TARGET_PPC64)
2851 if (ctx->sf_mode)
2852 tcg_gen_andi_tl(cpu_nip, cpu_T[1], ~3);
2853 else
2854 #endif
2855 tcg_gen_andi_tl(cpu_nip, cpu_T[1], (uint32_t)~3);
2856 if (unlikely(ctx->singlestep_enabled)) {
2857 if ((ctx->singlestep_enabled &
2858 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
2859 ctx->exception == POWERPC_EXCP_BRANCH) {
2860 target_ulong tmp = ctx->nip;
2861 ctx->nip = dest;
2862 GEN_EXCP(ctx, POWERPC_EXCP_TRACE, 0);
2863 ctx->nip = tmp;
2865 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
2866 gen_update_nip(ctx, dest);
2867 gen_op_debug();
2870 tcg_gen_exit_tb(0);
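/* Illustration: gen_goto_tb() above chains directly to the target TB
 * (tcg_gen_goto_tb + tcg_gen_exit_tb((long)tb + n)) only when the destination
 * stays within the current guest page and we are not single-stepping; otherwise
 * it stores the destination into nip and leaves with tcg_gen_exit_tb(0).  The
 * "& ~3" drops the two low address bits, as PowerPC instructions are always
 * word-aligned.
 */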
2874 static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip)
2876 #if defined(TARGET_PPC64)
2877 if (ctx->sf_mode != 0 && (nip >> 32))
2878 gen_op_setlr_64(ctx->nip >> 32, ctx->nip);
2879 else
2880 #endif
2881 gen_op_setlr(ctx->nip);
2884 /* b ba bl bla */
2885 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
2887 target_ulong li, target;
2889 ctx->exception = POWERPC_EXCP_BRANCH;
2890 /* sign extend LI */
2891 #if defined(TARGET_PPC64)
2892 if (ctx->sf_mode)
2893 li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
2894 else
2895 #endif
2896 li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
2897 if (likely(AA(ctx->opcode) == 0))
2898 target = ctx->nip + li - 4;
2899 else
2900 target = li;
2901 #if defined(TARGET_PPC64)
2902 if (!ctx->sf_mode)
2903 target = (uint32_t)target;
2904 #endif
2905 if (LK(ctx->opcode))
2906 gen_setlr(ctx, ctx->nip);
2907 gen_goto_tb(ctx, 0, target);
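/* Worked example (illustration only): LI is the 26-bit signed displacement (a
 * 24-bit field with two implied zero bits), so a branch back by one instruction
 * has LI = 0x3FFFFFC and ((int32_t)0x03FFFFFC << 6) >> 6 = 0xFFFFFFFC = -4.
 * The "- 4" in target = ctx->nip + li - 4 compensates for ctx->nip already
 * pointing at the instruction that follows the branch.
 */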
2910 #define BCOND_IM 0
2911 #define BCOND_LR 1
2912 #define BCOND_CTR 2
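/* Illustration: gen_bcond() below decodes the BO field as
 *   bo & 0x10  ignore the CR condition,
 *   bo & 0x08  branch if the selected CR bit is set (else if it is clear),
 *   bo & 0x04  do not decrement CTR,
 *   bo & 0x02  branch if the decremented CTR is zero (else if it is non-zero),
 * so e.g. bo = 0x14 ("branch always") skips both the CTR and the CR tests.
 */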
2914 static always_inline void gen_bcond (DisasContext *ctx, int type)
2916 target_ulong target = 0;
2917 target_ulong li;
2918 uint32_t bo = BO(ctx->opcode);
2919 uint32_t bi = BI(ctx->opcode);
2920 uint32_t mask;
2922 ctx->exception = POWERPC_EXCP_BRANCH;
2923 if ((bo & 0x4) == 0)
2924 gen_op_dec_ctr();
2925 switch(type) {
2926 case BCOND_IM:
2927 li = (target_long)((int16_t)(BD(ctx->opcode)));
2928 if (likely(AA(ctx->opcode) == 0)) {
2929 target = ctx->nip + li - 4;
2930 } else {
2931 target = li;
2933 #if defined(TARGET_PPC64)
2934 if (!ctx->sf_mode)
2935 target = (uint32_t)target;
2936 #endif
2937 break;
2938 case BCOND_CTR:
2939 gen_op_movl_T1_ctr();
2940 break;
2941 default:
2942 case BCOND_LR:
2943 gen_op_movl_T1_lr();
2944 break;
2946 if (LK(ctx->opcode))
2947 gen_setlr(ctx, ctx->nip);
2948 if (bo & 0x10) {
2949 /* No CR condition */
2950 switch (bo & 0x6) {
2951 case 0:
2952 #if defined(TARGET_PPC64)
2953 if (ctx->sf_mode)
2954 gen_op_test_ctr_64();
2955 else
2956 #endif
2957 gen_op_test_ctr();
2958 break;
2959 case 2:
2960 #if defined(TARGET_PPC64)
2961 if (ctx->sf_mode)
2962 gen_op_test_ctrz_64();
2963 else
2964 #endif
2965 gen_op_test_ctrz();
2966 break;
2967 default:
2968 case 4:
2969 case 6:
2970 if (type == BCOND_IM) {
2971 gen_goto_tb(ctx, 0, target);
2972 return;
2973 } else {
2974 #if defined(TARGET_PPC64)
2975 if (ctx->sf_mode)
2976 tcg_gen_andi_tl(cpu_nip, cpu_T[1], ~3);
2977 else
2978 #endif
2979 tcg_gen_andi_tl(cpu_nip, cpu_T[1], (uint32_t)~3);
2980 goto no_test;
2982 break;
2984 } else {
2985 mask = 1 << (3 - (bi & 0x03));
2986 tcg_gen_mov_i32(cpu_T[0], cpu_crf[bi >> 2]);
2987 if (bo & 0x8) {
2988 switch (bo & 0x6) {
2989 case 0:
2990 #if defined(TARGET_PPC64)
2991 if (ctx->sf_mode)
2992 gen_op_test_ctr_true_64(mask);
2993 else
2994 #endif
2995 gen_op_test_ctr_true(mask);
2996 break;
2997 case 2:
2998 #if defined(TARGET_PPC64)
2999 if (ctx->sf_mode)
3000 gen_op_test_ctrz_true_64(mask);
3001 else
3002 #endif
3003 gen_op_test_ctrz_true(mask);
3004 break;
3005 default:
3006 case 4:
3007 case 6:
3008 gen_op_test_true(mask);
3009 break;
3011 } else {
3012 switch (bo & 0x6) {
3013 case 0:
3014 #if defined(TARGET_PPC64)
3015 if (ctx->sf_mode)
3016 gen_op_test_ctr_false_64(mask);
3017 else
3018 #endif
3019 gen_op_test_ctr_false(mask);
3020 break;
3021 case 2:
3022 #if defined(TARGET_PPC64)
3023 if (ctx->sf_mode)
3024 gen_op_test_ctrz_false_64(mask);
3025 else
3026 #endif
3027 gen_op_test_ctrz_false(mask);
3028 break;
3029 default:
3030 case 4:
3031 case 6:
3032 gen_op_test_false(mask);
3033 break;
3037 if (type == BCOND_IM) {
3038 int l1 = gen_new_label();
3039 gen_op_jz_T0(l1);
3040 gen_goto_tb(ctx, 0, target);
3041 gen_set_label(l1);
3042 gen_goto_tb(ctx, 1, ctx->nip);
3043 } else {
3044 #if defined(TARGET_PPC64)
3045 if (ctx->sf_mode)
3046 gen_op_btest_T1_64(ctx->nip >> 32, ctx->nip);
3047 else
3048 #endif
3049 gen_op_btest_T1(ctx->nip);
3050 no_test:
3051 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3052 gen_update_nip(ctx, ctx->nip);
3053 gen_op_debug();
3055 tcg_gen_exit_tb(0);
3059 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3061 gen_bcond(ctx, BCOND_IM);
3064 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW)
3066 gen_bcond(ctx, BCOND_CTR);
3069 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW)
3071 gen_bcond(ctx, BCOND_LR);
3074 /*** Condition register logical ***/
3075 #define GEN_CRLOGIC(op, opc) \
3076 GEN_HANDLER(cr##op, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) \
3078 uint8_t bitmask; \
3079 int sh; \
3080 tcg_gen_mov_i32(cpu_T[0], cpu_crf[crbA(ctx->opcode) >> 2]); \
3081 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3082 if (sh > 0) \
3083 gen_op_srli_T0(sh); \
3084 else if (sh < 0) \
3085 gen_op_sli_T0(-sh); \
3086 tcg_gen_mov_i32(cpu_T[1], cpu_crf[crbB(ctx->opcode) >> 2]); \
3087 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3088 if (sh > 0) \
3089 gen_op_srli_T1(sh); \
3090 else if (sh < 0) \
3091 gen_op_sli_T1(-sh); \
3092 gen_op_##op(); \
3093 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3094 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], bitmask); \
3095 tcg_gen_andi_i32(cpu_T[1], cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3096 gen_op_or(); \
3097 tcg_gen_andi_i32(cpu_crf[crbD(ctx->opcode) >> 2], cpu_T[0], 0xf); \
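/* Worked example (illustration only): for crand 0,4,9 the source bits live in
 * cpu_crf[1] (crbA = 4, nibble bit 3) and cpu_crf[2] (crbB = 9, nibble bit 2);
 * the shifts above align both with destination bit 3 (crbD = 0), the AND op is
 * applied, and the result is merged into cpu_crf[0] under bitmask = 1 << 3
 * without disturbing the other three bits of that CR field.
 */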
3100 /* crand */
3101 GEN_CRLOGIC(and, 0x08);
3102 /* crandc */
3103 GEN_CRLOGIC(andc, 0x04);
3104 /* creqv */
3105 GEN_CRLOGIC(eqv, 0x09);
3106 /* crnand */
3107 GEN_CRLOGIC(nand, 0x07);
3108 /* crnor */
3109 GEN_CRLOGIC(nor, 0x01);
3110 /* cror */
3111 GEN_CRLOGIC(or, 0x0E);
3112 /* crorc */
3113 GEN_CRLOGIC(orc, 0x0D);
3114 /* crxor */
3115 GEN_CRLOGIC(xor, 0x06);
3116 /* mcrf */
3117 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER)
3119 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3122 /*** System linkage ***/
3123 /* rfi (supervisor only) */
3124 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW)
3126 #if defined(CONFIG_USER_ONLY)
3127 GEN_EXCP_PRIVOPC(ctx);
3128 #else
3129 /* Restore CPU state */
3130 if (unlikely(!ctx->supervisor)) {
3131 GEN_EXCP_PRIVOPC(ctx);
3132 return;
3134 gen_op_rfi();
3135 GEN_SYNC(ctx);
3136 #endif
3139 #if defined(TARGET_PPC64)
3140 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B)
3142 #if defined(CONFIG_USER_ONLY)
3143 GEN_EXCP_PRIVOPC(ctx);
3144 #else
3145 /* Restore CPU state */
3146 if (unlikely(!ctx->supervisor)) {
3147 GEN_EXCP_PRIVOPC(ctx);
3148 return;
3150 gen_op_rfid();
3151 GEN_SYNC(ctx);
3152 #endif
3155 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H)
3157 #if defined(CONFIG_USER_ONLY)
3158 GEN_EXCP_PRIVOPC(ctx);
3159 #else
3160 /* Restore CPU state */
3161 if (unlikely(ctx->supervisor <= 1)) {
3162 GEN_EXCP_PRIVOPC(ctx);
3163 return;
3165 gen_op_hrfid();
3166 GEN_SYNC(ctx);
3167 #endif
3169 #endif
3171 /* sc */
3172 #if defined(CONFIG_USER_ONLY)
3173 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3174 #else
3175 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3176 #endif
3177 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW)
3179 uint32_t lev;
3181 lev = (ctx->opcode >> 5) & 0x7F;
3182 GEN_EXCP(ctx, POWERPC_SYSCALL, lev);
3185 /*** Trap ***/
3186 /* tw */
3187 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW)
3189 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3190 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3191 /* Update the nip since this might generate a trap exception */
3192 gen_update_nip(ctx, ctx->nip);
3193 gen_op_tw(TO(ctx->opcode));
3196 /* twi */
3197 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3199 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3200 tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
3201 /* Update the nip since this might generate a trap exception */
3202 gen_update_nip(ctx, ctx->nip);
3203 gen_op_tw(TO(ctx->opcode));
3206 #if defined(TARGET_PPC64)
3207 /* td */
3208 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B)
3210 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3211 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3212 /* Update the nip since this might generate a trap exception */
3213 gen_update_nip(ctx, ctx->nip);
3214 gen_op_td(TO(ctx->opcode));
3217 /* tdi */
3218 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B)
3220 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3221 tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
3222 /* Update the nip since this might generate a trap exception */
3223 gen_update_nip(ctx, ctx->nip);
3224 gen_op_td(TO(ctx->opcode));
3226 #endif
3228 /*** Processor control ***/
3229 /* mcrxr */
3230 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC)
3232 gen_op_load_xer_cr();
3233 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
3234 gen_op_clear_xer_ov();
3235 gen_op_clear_xer_ca();
3238 /* mfcr */
3239 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC)
3241 uint32_t crm, crn;
3243 if (likely(ctx->opcode & 0x00100000)) {
3244 crm = CRM(ctx->opcode);
3245 if (likely(crm != 0 && (crm & (crm - 1)) == 0)) {
3246 crn = ffs(crm) - 1; /* bit index of the single selected CR field */
3247 tcg_gen_mov_i32(cpu_T[0], cpu_crf[7 - crn]);
3249 } else {
3250 gen_op_load_cr();
3252 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3255 /* mfmsr */
3256 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC)
3258 #if defined(CONFIG_USER_ONLY)
3259 GEN_EXCP_PRIVREG(ctx);
3260 #else
3261 if (unlikely(!ctx->supervisor)) {
3262 GEN_EXCP_PRIVREG(ctx);
3263 return;
3265 gen_op_load_msr();
3266 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3267 #endif
3270 #if 1
3271 #define SPR_NOACCESS ((void *)(-1UL))
3272 #else
3273 static void spr_noaccess (void *opaque, int sprn)
3275 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3276 printf("ERROR: trying to access SPR %d!\n", sprn);
3278 #define SPR_NOACCESS (&spr_noaccess)
3279 #endif
3281 /* mfspr */
3282 static always_inline void gen_op_mfspr (DisasContext *ctx)
3284 void (*read_cb)(void *opaque, int sprn);
3285 uint32_t sprn = SPR(ctx->opcode);
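/* Pick the SPR read callback matching the translation privilege level: hea_*
 * for hypervisor state (ctx->supervisor == 2), oea_* for supervisor state and
 * uea_* for problem (user) state; the user-only build always takes the uea_*
 * path.
 */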
3287 #if !defined(CONFIG_USER_ONLY)
3288 if (ctx->supervisor == 2)
3289 read_cb = ctx->spr_cb[sprn].hea_read;
3290 else if (ctx->supervisor)
3291 read_cb = ctx->spr_cb[sprn].oea_read;
3292 else
3293 #endif
3294 read_cb = ctx->spr_cb[sprn].uea_read;
3295 if (likely(read_cb != NULL)) {
3296 if (likely(read_cb != SPR_NOACCESS)) {
3297 (*read_cb)(ctx, sprn);
3298 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3299 } else {
3300 /* Privilege exception */
3301 /* This is a hack to avoid warnings when running Linux:
3302 * this OS breaks the PowerPC virtualisation model,
3303 * allowing userland applications to read the PVR
3305 if (sprn != SPR_PVR) {
3306 if (loglevel != 0) {
3307 fprintf(logfile, "Trying to read privileged spr %d %03x at "
3308 ADDRX "\n", sprn, sprn, ctx->nip);
3310 printf("Trying to read privileged spr %d %03x at " ADDRX "\n",
3311 sprn, sprn, ctx->nip);
3313 GEN_EXCP_PRIVREG(ctx);
3315 } else {
3316 /* Not defined */
3317 if (loglevel != 0) {
3318 fprintf(logfile, "Trying to read invalid spr %d %03x at "
3319 ADDRX "\n", sprn, sprn, ctx->nip);
3321 printf("Trying to read invalid spr %d %03x at " ADDRX "\n",
3322 sprn, sprn, ctx->nip);
3323 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3324 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
3328 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC)
3330 gen_op_mfspr(ctx);
3333 /* mftb */
3334 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB)
3336 gen_op_mfspr(ctx);
3339 /* mtcrf */
3340 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC)
3342 uint32_t crm, crn;
3344 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3345 crm = CRM(ctx->opcode);
3346 if (likely(crm != 0 && ((ctx->opcode & 0x00100000) || (crm & (crm - 1)) == 0))) {
3347 crn = ffs(crm) - 1; /* bit index of the single selected CR field */
3348 gen_op_srli_T0(crn * 4);
3349 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_T[0], 0xf);
3350 } else {
3351 gen_op_store_cr(crm);
3355 /* mtmsr */
3356 #if defined(TARGET_PPC64)
3357 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B)
3359 #if defined(CONFIG_USER_ONLY)
3360 GEN_EXCP_PRIVREG(ctx);
3361 #else
3362 if (unlikely(!ctx->supervisor)) {
3363 GEN_EXCP_PRIVREG(ctx);
3364 return;
3366 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3367 if (ctx->opcode & 0x00010000) {
3368 /* Special form that does not need any synchronisation */
3369 gen_op_update_riee();
3370 } else {
3371 /* XXX: we need to update nip before the store:
3372 * if we enter power saving mode, we will exit the loop
3373 * directly from ppc_store_msr
3375 gen_update_nip(ctx, ctx->nip);
3376 gen_op_store_msr();
3377 /* Must stop the translation as machine state (may have) changed */
3378 /* Note that mtmsrd is not always defined as context-synchronizing */
3379 ctx->exception = POWERPC_EXCP_STOP;
3381 #endif
3383 #endif
3385 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC)
3387 #if defined(CONFIG_USER_ONLY)
3388 GEN_EXCP_PRIVREG(ctx);
3389 #else
3390 if (unlikely(!ctx->supervisor)) {
3391 GEN_EXCP_PRIVREG(ctx);
3392 return;
3394 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3395 if (ctx->opcode & 0x00010000) {
3396 /* Special form that does not need any synchronisation */
3397 gen_op_update_riee();
3398 } else {
3399 /* XXX: we need to update nip before the store:
3400 * if we enter power saving mode, we will exit the loop
3401 * directly from ppc_store_msr
3403 gen_update_nip(ctx, ctx->nip);
3404 #if defined(TARGET_PPC64)
3405 if (!ctx->sf_mode)
3406 gen_op_store_msr_32();
3407 else
3408 #endif
3409 gen_op_store_msr();
3410 /* Must stop the translation as machine state (may have) changed */
3411 /* Note that mtmsr is not always defined as context-synchronizing */
3412 ctx->exception = POWERPC_EXCP_STOP;
3414 #endif
3417 /* mtspr */
3418 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC)
3420 void (*write_cb)(void *opaque, int sprn);
3421 uint32_t sprn = SPR(ctx->opcode);
3423 #if !defined(CONFIG_USER_ONLY)
3424 if (ctx->supervisor == 2)
3425 write_cb = ctx->spr_cb[sprn].hea_write;
3426 else if (ctx->supervisor)
3427 write_cb = ctx->spr_cb[sprn].oea_write;
3428 else
3429 #endif
3430 write_cb = ctx->spr_cb[sprn].uea_write;
3431 if (likely(write_cb != NULL)) {
3432 if (likely(write_cb != SPR_NOACCESS)) {
3433 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3434 (*write_cb)(ctx, sprn);
3435 } else {
3436 /* Privilege exception */
3437 if (loglevel != 0) {
3438 fprintf(logfile, "Trying to write privileged spr %d %03x at "
3439 ADDRX "\n", sprn, sprn, ctx->nip);
3441 printf("Trying to write privileged spr %d %03x at " ADDRX "\n",
3442 sprn, sprn, ctx->nip);
3443 GEN_EXCP_PRIVREG(ctx);
3445 } else {
3446 /* Not defined */
3447 if (loglevel != 0) {
3448 fprintf(logfile, "Trying to write invalid spr %d %03x at "
3449 ADDRX "\n", sprn, sprn, ctx->nip);
3451 printf("Trying to write invalid spr %d %03x at " ADDRX "\n",
3452 sprn, sprn, ctx->nip);
3453 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3454 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
3458 /*** Cache management ***/
3459 /* dcbf */
3460 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE)
3462 /* XXX: specification says this is treated as a load by the MMU */
3463 gen_addr_reg_index(ctx);
3464 op_ldst(lbz);
3467 /* dcbi (Supervisor only) */
3468 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE)
3470 #if defined(CONFIG_USER_ONLY)
3471 GEN_EXCP_PRIVOPC(ctx);
3472 #else
3473 if (unlikely(!ctx->supervisor)) {
3474 GEN_EXCP_PRIVOPC(ctx);
3475 return;
3477 gen_addr_reg_index(ctx);
3478 /* XXX: specification says this should be treated as a store by the MMU */
3479 op_ldst(lbz);
3480 op_ldst(stb);
3481 #endif
3484 /* dcbst */
3485 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE)
3487 /* XXX: specification says this is treated as a load by the MMU */
3488 gen_addr_reg_index(ctx);
3489 op_ldst(lbz);
3492 /* dcbt */
3493 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE)
3495 /* interpreted as no-op */
3496 /* XXX: specification says this is treated as a load by the MMU
3497 * but does not generate any exception
3501 /* dcbtst */
3502 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE)
3504 /* interpreted as no-op */
3505 /* XXX: specification says this is treated as a load by the MMU
3506 * but does not generate any exception
3510 /* dcbz */
3511 #define op_dcbz(n) (*gen_op_dcbz[n][ctx->mem_idx])()
3512 static GenOpFunc *gen_op_dcbz[4][NB_MEM_FUNCS] = {
3513 /* 32-byte cache line size */
3515 #define gen_op_dcbz_l32_le_raw gen_op_dcbz_l32_raw
3516 #define gen_op_dcbz_l32_le_user gen_op_dcbz_l32_user
3517 #define gen_op_dcbz_l32_le_kernel gen_op_dcbz_l32_kernel
3518 #define gen_op_dcbz_l32_le_hypv gen_op_dcbz_l32_hypv
3519 #define gen_op_dcbz_l32_le_64_raw gen_op_dcbz_l32_64_raw
3520 #define gen_op_dcbz_l32_le_64_user gen_op_dcbz_l32_64_user
3521 #define gen_op_dcbz_l32_le_64_kernel gen_op_dcbz_l32_64_kernel
3522 #define gen_op_dcbz_l32_le_64_hypv gen_op_dcbz_l32_64_hypv
3523 GEN_MEM_FUNCS(dcbz_l32),
3525 /* 64-byte cache line size */
3527 #define gen_op_dcbz_l64_le_raw gen_op_dcbz_l64_raw
3528 #define gen_op_dcbz_l64_le_user gen_op_dcbz_l64_user
3529 #define gen_op_dcbz_l64_le_kernel gen_op_dcbz_l64_kernel
3530 #define gen_op_dcbz_l64_le_hypv gen_op_dcbz_l64_hypv
3531 #define gen_op_dcbz_l64_le_64_raw gen_op_dcbz_l64_64_raw
3532 #define gen_op_dcbz_l64_le_64_user gen_op_dcbz_l64_64_user
3533 #define gen_op_dcbz_l64_le_64_kernel gen_op_dcbz_l64_64_kernel
3534 #define gen_op_dcbz_l64_le_64_hypv gen_op_dcbz_l64_64_hypv
3535 GEN_MEM_FUNCS(dcbz_l64),
3537 /* 128-byte cache line size */
3539 #define gen_op_dcbz_l128_le_raw gen_op_dcbz_l128_raw
3540 #define gen_op_dcbz_l128_le_user gen_op_dcbz_l128_user
3541 #define gen_op_dcbz_l128_le_kernel gen_op_dcbz_l128_kernel
3542 #define gen_op_dcbz_l128_le_hypv gen_op_dcbz_l128_hypv
3543 #define gen_op_dcbz_l128_le_64_raw gen_op_dcbz_l128_64_raw
3544 #define gen_op_dcbz_l128_le_64_user gen_op_dcbz_l128_64_user
3545 #define gen_op_dcbz_l128_le_64_kernel gen_op_dcbz_l128_64_kernel
3546 #define gen_op_dcbz_l128_le_64_hypv gen_op_dcbz_l128_64_hypv
3547 GEN_MEM_FUNCS(dcbz_l128),
3549 /* tunable cache line size */
3551 #define gen_op_dcbz_le_raw gen_op_dcbz_raw
3552 #define gen_op_dcbz_le_user gen_op_dcbz_user
3553 #define gen_op_dcbz_le_kernel gen_op_dcbz_kernel
3554 #define gen_op_dcbz_le_hypv gen_op_dcbz_hypv
3555 #define gen_op_dcbz_le_64_raw gen_op_dcbz_64_raw
3556 #define gen_op_dcbz_le_64_user gen_op_dcbz_64_user
3557 #define gen_op_dcbz_le_64_kernel gen_op_dcbz_64_kernel
3558 #define gen_op_dcbz_le_64_hypv gen_op_dcbz_64_hypv
3559 GEN_MEM_FUNCS(dcbz),
3563 static always_inline void handler_dcbz (DisasContext *ctx,
3564 int dcache_line_size)
3566 int n;
3568 switch (dcache_line_size) {
3569 case 32:
3570 n = 0;
3571 break;
3572 case 64:
3573 n = 1;
3574 break;
3575 case 128:
3576 n = 2;
3577 break;
3578 default:
3579 n = 3;
3580 break;
3582 op_dcbz(n);
3585 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ)
3587 gen_addr_reg_index(ctx);
3588 handler_dcbz(ctx, ctx->dcache_line_size);
3589 gen_op_check_reservation();
3592 GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT)
3594 gen_addr_reg_index(ctx);
3595 if (ctx->opcode & 0x00200000)
3596 handler_dcbz(ctx, ctx->dcache_line_size);
3597 else
3598 handler_dcbz(ctx, -1);
3599 gen_op_check_reservation();
3602 /* icbi */
3603 #define op_icbi() (*gen_op_icbi[ctx->mem_idx])()
3604 #define gen_op_icbi_le_raw gen_op_icbi_raw
3605 #define gen_op_icbi_le_user gen_op_icbi_user
3606 #define gen_op_icbi_le_kernel gen_op_icbi_kernel
3607 #define gen_op_icbi_le_hypv gen_op_icbi_hypv
3608 #define gen_op_icbi_le_64_raw gen_op_icbi_64_raw
3609 #define gen_op_icbi_le_64_user gen_op_icbi_64_user
3610 #define gen_op_icbi_le_64_kernel gen_op_icbi_64_kernel
3611 #define gen_op_icbi_le_64_hypv gen_op_icbi_64_hypv
3612 static GenOpFunc *gen_op_icbi[NB_MEM_FUNCS] = {
3613 GEN_MEM_FUNCS(icbi),
3616 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI)
3618 /* NIP cannot be restored if the memory exception comes from a helper */
3619 gen_update_nip(ctx, ctx->nip - 4);
3620 gen_addr_reg_index(ctx);
3621 op_icbi();
3624 /* Optional: */
3625 /* dcba */
3626 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA)
3628 /* interpreted as no-op */
3629 /* XXX: specification says this is treated as a store by the MMU
3630 * but does not generate any exception
3634 /*** Segment register manipulation ***/
3635 /* Supervisor only: */
3636 /* mfsr */
3637 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT)
3639 #if defined(CONFIG_USER_ONLY)
3640 GEN_EXCP_PRIVREG(ctx);
3641 #else
3642 if (unlikely(!ctx->supervisor)) {
3643 GEN_EXCP_PRIVREG(ctx);
3644 return;
3646 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3647 gen_op_load_sr();
3648 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3649 #endif
3652 /* mfsrin */
3653 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT)
3655 #if defined(CONFIG_USER_ONLY)
3656 GEN_EXCP_PRIVREG(ctx);
3657 #else
3658 if (unlikely(!ctx->supervisor)) {
3659 GEN_EXCP_PRIVREG(ctx);
3660 return;
3662 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3663 gen_op_srli_T1(28);
3664 gen_op_load_sr();
3665 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3666 #endif
3669 /* mtsr */
3670 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT)
3672 #if defined(CONFIG_USER_ONLY)
3673 GEN_EXCP_PRIVREG(ctx);
3674 #else
3675 if (unlikely(!ctx->supervisor)) {
3676 GEN_EXCP_PRIVREG(ctx);
3677 return;
3679 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3680 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3681 gen_op_store_sr();
3682 #endif
3685 /* mtsrin */
3686 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT)
3688 #if defined(CONFIG_USER_ONLY)
3689 GEN_EXCP_PRIVREG(ctx);
3690 #else
3691 if (unlikely(!ctx->supervisor)) {
3692 GEN_EXCP_PRIVREG(ctx);
3693 return;
3695 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3696 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3697 gen_op_srli_T1(28);
3698 gen_op_store_sr();
3699 #endif
3702 #if defined(TARGET_PPC64)
3703 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
3704 /* mfsr */
3705 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B)
3707 #if defined(CONFIG_USER_ONLY)
3708 GEN_EXCP_PRIVREG(ctx);
3709 #else
3710 if (unlikely(!ctx->supervisor)) {
3711 GEN_EXCP_PRIVREG(ctx);
3712 return;
3714 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3715 gen_op_load_slb();
3716 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3717 #endif
3720 /* mfsrin */
3721 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
3722 PPC_SEGMENT_64B)
3724 #if defined(CONFIG_USER_ONLY)
3725 GEN_EXCP_PRIVREG(ctx);
3726 #else
3727 if (unlikely(!ctx->supervisor)) {
3728 GEN_EXCP_PRIVREG(ctx);
3729 return;
3731 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3732 gen_op_srli_T1(28);
3733 gen_op_load_slb();
3734 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3735 #endif
3738 /* mtsr */
3739 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B)
3741 #if defined(CONFIG_USER_ONLY)
3742 GEN_EXCP_PRIVREG(ctx);
3743 #else
3744 if (unlikely(!ctx->supervisor)) {
3745 GEN_EXCP_PRIVREG(ctx);
3746 return;
3748 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3749 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3750 gen_op_store_slb();
3751 #endif
3754 /* mtsrin */
3755 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
3756 PPC_SEGMENT_64B)
3758 #if defined(CONFIG_USER_ONLY)
3759 GEN_EXCP_PRIVREG(ctx);
3760 #else
3761 if (unlikely(!ctx->supervisor)) {
3762 GEN_EXCP_PRIVREG(ctx);
3763 return;
3765 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3766 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3767 gen_op_srli_T1(28);
3768 gen_op_store_slb();
3769 #endif
3771 #endif /* defined(TARGET_PPC64) */
3773 /*** Lookaside buffer management ***/
3774 /* Optional & supervisor only: */
3775 /* tlbia */
3776 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA)
3778 #if defined(CONFIG_USER_ONLY)
3779 GEN_EXCP_PRIVOPC(ctx);
3780 #else
3781 if (unlikely(!ctx->supervisor)) {
3782 GEN_EXCP_PRIVOPC(ctx);
3783 return;
3785 gen_op_tlbia();
3786 #endif
3789 /* tlbie */
3790 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE)
3792 #if defined(CONFIG_USER_ONLY)
3793 GEN_EXCP_PRIVOPC(ctx);
3794 #else
3795 if (unlikely(!ctx->supervisor)) {
3796 GEN_EXCP_PRIVOPC(ctx);
3797 return;
3799 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
3800 #if defined(TARGET_PPC64)
3801 if (ctx->sf_mode)
3802 gen_op_tlbie_64();
3803 else
3804 #endif
3805 gen_op_tlbie();
3806 #endif
3809 /* tlbsync */
3810 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC)
3812 #if defined(CONFIG_USER_ONLY)
3813 GEN_EXCP_PRIVOPC(ctx);
3814 #else
3815 if (unlikely(!ctx->supervisor)) {
3816 GEN_EXCP_PRIVOPC(ctx);
3817 return;
3819 /* This has no effect: it should ensure that all previous
3820 * tlbie operations have completed
3822 GEN_STOP(ctx);
3823 #endif
3826 #if defined(TARGET_PPC64)
3827 /* slbia */
3828 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI)
3830 #if defined(CONFIG_USER_ONLY)
3831 GEN_EXCP_PRIVOPC(ctx);
3832 #else
3833 if (unlikely(!ctx->supervisor)) {
3834 GEN_EXCP_PRIVOPC(ctx);
3835 return;
3837 gen_op_slbia();
3838 #endif
3841 /* slbie */
3842 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI)
3844 #if defined(CONFIG_USER_ONLY)
3845 GEN_EXCP_PRIVOPC(ctx);
3846 #else
3847 if (unlikely(!ctx->supervisor)) {
3848 GEN_EXCP_PRIVOPC(ctx);
3849 return;
3851 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
3852 gen_op_slbie();
3853 #endif
3855 #endif
3857 /*** External control ***/
3858 /* Optional: */
3859 #define op_eciwx() (*gen_op_eciwx[ctx->mem_idx])()
3860 #define op_ecowx() (*gen_op_ecowx[ctx->mem_idx])()
3861 static GenOpFunc *gen_op_eciwx[NB_MEM_FUNCS] = {
3862 GEN_MEM_FUNCS(eciwx),
3864 static GenOpFunc *gen_op_ecowx[NB_MEM_FUNCS] = {
3865 GEN_MEM_FUNCS(ecowx),
3868 /* eciwx */
3869 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN)
3871 /* Should check EAR[E] & alignment! */
3872 gen_addr_reg_index(ctx);
3873 op_eciwx();
3874 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3877 /* ecowx */
3878 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN)
3880 /* Should check EAR[E] & alignment! */
3881 gen_addr_reg_index(ctx);
3882 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
3883 op_ecowx();
3886 /* PowerPC 601 specific instructions */
3887 /* abs - abs. */
3888 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR)
3890 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3891 gen_op_POWER_abs();
3892 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3893 if (unlikely(Rc(ctx->opcode) != 0))
3894 gen_set_Rc0(ctx);
3897 /* abso - abso. */
3898 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR)
3900 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3901 gen_op_POWER_abso();
3902 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3903 if (unlikely(Rc(ctx->opcode) != 0))
3904 gen_set_Rc0(ctx);
3907 /* clcs */
3908 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR)
3910 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3911 gen_op_POWER_clcs();
3912 /* Rc=1 sets CR0 to an undefined state */
3913 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3916 /* div - div. */
3917 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR)
3919 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3920 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3921 gen_op_POWER_div();
3922 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3923 if (unlikely(Rc(ctx->opcode) != 0))
3924 gen_set_Rc0(ctx);
3927 /* divo - divo. */
3928 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR)
3930 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3931 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3932 gen_op_POWER_divo();
3933 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3934 if (unlikely(Rc(ctx->opcode) != 0))
3935 gen_set_Rc0(ctx);
3938 /* divs - divs. */
3939 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR)
3941 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3942 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3943 gen_op_POWER_divs();
3944 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3945 if (unlikely(Rc(ctx->opcode) != 0))
3946 gen_set_Rc0(ctx);
3949 /* divso - divso. */
3950 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR)
3952 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3953 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3954 gen_op_POWER_divso();
3955 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3956 if (unlikely(Rc(ctx->opcode) != 0))
3957 gen_set_Rc0(ctx);
3960 /* doz - doz. */
3961 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR)
3963 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3964 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3965 gen_op_POWER_doz();
3966 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3967 if (unlikely(Rc(ctx->opcode) != 0))
3968 gen_set_Rc0(ctx);
3971 /* dozo - dozo. */
3972 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR)
3974 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3975 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3976 gen_op_POWER_dozo();
3977 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3978 if (unlikely(Rc(ctx->opcode) != 0))
3979 gen_set_Rc0(ctx);
3982 /* dozi */
3983 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
3985 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3986 tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
3987 gen_op_POWER_doz();
3988 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3991 /* As lscbx loads from memory byte after byte, it is always endian safe.
3992 * Original POWER is 32 bits only; define the 64-bit ops as the 32-bit ones
3994 #define op_POWER_lscbx(start, ra, rb) \
3995 (*gen_op_POWER_lscbx[ctx->mem_idx])(start, ra, rb)
3996 #define gen_op_POWER_lscbx_64_raw gen_op_POWER_lscbx_raw
3997 #define gen_op_POWER_lscbx_64_user gen_op_POWER_lscbx_user
3998 #define gen_op_POWER_lscbx_64_kernel gen_op_POWER_lscbx_kernel
3999 #define gen_op_POWER_lscbx_64_hypv gen_op_POWER_lscbx_hypv
4000 #define gen_op_POWER_lscbx_le_raw gen_op_POWER_lscbx_raw
4001 #define gen_op_POWER_lscbx_le_user gen_op_POWER_lscbx_user
4002 #define gen_op_POWER_lscbx_le_kernel gen_op_POWER_lscbx_kernel
4003 #define gen_op_POWER_lscbx_le_hypv gen_op_POWER_lscbx_hypv
4004 #define gen_op_POWER_lscbx_le_64_raw gen_op_POWER_lscbx_raw
4005 #define gen_op_POWER_lscbx_le_64_user gen_op_POWER_lscbx_user
4006 #define gen_op_POWER_lscbx_le_64_kernel gen_op_POWER_lscbx_kernel
4007 #define gen_op_POWER_lscbx_le_64_hypv gen_op_POWER_lscbx_hypv
4008 static GenOpFunc3 *gen_op_POWER_lscbx[NB_MEM_FUNCS] = {
4009 GEN_MEM_FUNCS(POWER_lscbx),
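/* Dispatch note (derived from the macros above): ctx->mem_idx selects the
 * access-mode variant (raw/user/kernel/hypv) from the gen_op_POWER_lscbx[]
 * table at translation time.  Since lscbx transfers one byte at a time, the
 * little-endian and 64-bit entries are simply aliased to the plain 32-bit
 * big-endian implementation.
 */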
4012 /* lscbx - lscbx. */
4013 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR)
4015 int ra = rA(ctx->opcode);
4016 int rb = rB(ctx->opcode);
4018 gen_addr_reg_index(ctx);
4019 if (ra == 0) {
4020 ra = rb;
4022 /* NIP cannot be restored if the memory exception comes from a helper */
4023 gen_update_nip(ctx, ctx->nip - 4);
4024 gen_op_load_xer_bc();
4025 gen_op_load_xer_cmp();
4026 op_POWER_lscbx(rD(ctx->opcode), ra, rb);
4027 gen_op_store_xer_bc();
4028 if (unlikely(Rc(ctx->opcode) != 0))
4029 gen_set_Rc0(ctx);
4032 /* maskg - maskg. */
4033 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR)
4035 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4036 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4037 gen_op_POWER_maskg();
4038 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4039 if (unlikely(Rc(ctx->opcode) != 0))
4040 gen_set_Rc0(ctx);
4043 /* maskir - maskir. */
4044 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR)
4046 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4047 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4048 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
4049 gen_op_POWER_maskir();
4050 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4051 if (unlikely(Rc(ctx->opcode) != 0))
4052 gen_set_Rc0(ctx);
4055 /* mul - mul. */
4056 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR)
4058 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4059 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4060 gen_op_POWER_mul();
4061 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4062 if (unlikely(Rc(ctx->opcode) != 0))
4063 gen_set_Rc0(ctx);
4066 /* mulo - mulo. */
4067 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR)
4069 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4070 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4071 gen_op_POWER_mulo();
4072 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4073 if (unlikely(Rc(ctx->opcode) != 0))
4074 gen_set_Rc0(ctx);
4077 /* nabs - nabs. */
4078 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR)
4080 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4081 gen_op_POWER_nabs();
4082 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4083 if (unlikely(Rc(ctx->opcode) != 0))
4084 gen_set_Rc0(ctx);
4087 /* nabso - nabso. */
4088 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR)
4090 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4091 gen_op_POWER_nabso();
4092 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4093 if (unlikely(Rc(ctx->opcode) != 0))
4094 gen_set_Rc0(ctx);
4097 /* rlmi - rlmi. */
4098 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
4100 uint32_t mb, me;
4102 mb = MB(ctx->opcode);
4103 me = ME(ctx->opcode);
4104 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4105 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
4106 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
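/* Per the POWER rlmi definition, MASK(mb, me) should select the bits taken
 * from the rotated rS while its complement keeps the remaining bits of rA;
 * both constants are passed so the micro-op can merge the two operands.
 */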
4107 gen_op_POWER_rlmi(MASK(mb, me), ~MASK(mb, me));
4108 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4109 if (unlikely(Rc(ctx->opcode) != 0))
4110 gen_set_Rc0(ctx);
4113 /* rrib - rrib. */
4114 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR)
4116 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4117 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
4118 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
4119 gen_op_POWER_rrib();
4120 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4121 if (unlikely(Rc(ctx->opcode) != 0))
4122 gen_set_Rc0(ctx);
4125 /* sle - sle. */
4126 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR)
4128 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4129 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4130 gen_op_POWER_sle();
4131 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4132 if (unlikely(Rc(ctx->opcode) != 0))
4133 gen_set_Rc0(ctx);
4136 /* sleq - sleq. */
4137 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR)
4139 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4140 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4141 gen_op_POWER_sleq();
4142 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4143 if (unlikely(Rc(ctx->opcode) != 0))
4144 gen_set_Rc0(ctx);
4147 /* sliq - sliq. */
4148 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR)
4150 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4151 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4152 gen_op_POWER_sle();
4153 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4154 if (unlikely(Rc(ctx->opcode) != 0))
4155 gen_set_Rc0(ctx);
4158 /* slliq - slliq. */
4159 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR)
4161 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4162 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4163 gen_op_POWER_sleq();
4164 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4165 if (unlikely(Rc(ctx->opcode) != 0))
4166 gen_set_Rc0(ctx);
4169 /* sllq - sllq. */
4170 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR)
4172 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4173 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4174 gen_op_POWER_sllq();
4175 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4176 if (unlikely(Rc(ctx->opcode) != 0))
4177 gen_set_Rc0(ctx);
4180 /* slq - slq. */
4181 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR)
4183 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4184 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4185 gen_op_POWER_slq();
4186 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4187 if (unlikely(Rc(ctx->opcode) != 0))
4188 gen_set_Rc0(ctx);
4191 /* sraiq - sraiq. */
4192 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR)
4194 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4195 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4196 gen_op_POWER_sraq();
4197 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4198 if (unlikely(Rc(ctx->opcode) != 0))
4199 gen_set_Rc0(ctx);
4202 /* sraq - sraq. */
4203 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR)
4205 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4206 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4207 gen_op_POWER_sraq();
4208 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4209 if (unlikely(Rc(ctx->opcode) != 0))
4210 gen_set_Rc0(ctx);
4213 /* sre - sre. */
4214 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR)
4216 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4217 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4218 gen_op_POWER_sre();
4219 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4220 if (unlikely(Rc(ctx->opcode) != 0))
4221 gen_set_Rc0(ctx);
4224 /* srea - srea. */
4225 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR)
4227 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4228 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4229 gen_op_POWER_srea();
4230 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4231 if (unlikely(Rc(ctx->opcode) != 0))
4232 gen_set_Rc0(ctx);
4235 /* sreq */
4236 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR)
4238 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4239 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4240 gen_op_POWER_sreq();
4241 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4242 if (unlikely(Rc(ctx->opcode) != 0))
4243 gen_set_Rc0(ctx);
4246 /* sriq */
4247 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR)
4249 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4250 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4251 gen_op_POWER_srq();
4252 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4253 if (unlikely(Rc(ctx->opcode) != 0))
4254 gen_set_Rc0(ctx);
4257 /* srliq */
4258 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR)
4260 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4262 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4263 gen_op_POWER_srlq();
4264 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4265 if (unlikely(Rc(ctx->opcode) != 0))
4266 gen_set_Rc0(ctx);
4269 /* srlq */
4270 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR)
4272 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4273 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4274 gen_op_POWER_srlq();
4275 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4276 if (unlikely(Rc(ctx->opcode) != 0))
4277 gen_set_Rc0(ctx);
4280 /* srq */
4281 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR)
4283 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4284 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4285 gen_op_POWER_srq();
4286 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4287 if (unlikely(Rc(ctx->opcode) != 0))
4288 gen_set_Rc0(ctx);
4291 /* PowerPC 602 specific instructions */
4292 /* dsa */
4293 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC)
4295 /* XXX: TODO */
4296 GEN_EXCP_INVAL(ctx);
4299 /* esa */
4300 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC)
4302 /* XXX: TODO */
4303 GEN_EXCP_INVAL(ctx);
4306 /* mfrom */
4307 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC)
4309 #if defined(CONFIG_USER_ONLY)
4310 GEN_EXCP_PRIVOPC(ctx);
4311 #else
4312 if (unlikely(!ctx->supervisor)) {
4313 GEN_EXCP_PRIVOPC(ctx);
4314 return;
4316 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4317 gen_op_602_mfrom();
4318 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4319 #endif
4322 /* 602 - 603 - G2 TLB management */
4323 /* tlbld */
4324 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB)
4326 #if defined(CONFIG_USER_ONLY)
4327 GEN_EXCP_PRIVOPC(ctx);
4328 #else
4329 if (unlikely(!ctx->supervisor)) {
4330 GEN_EXCP_PRIVOPC(ctx);
4331 return;
4333 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4334 gen_op_6xx_tlbld();
4335 #endif
4338 /* tlbli */
4339 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB)
4341 #if defined(CONFIG_USER_ONLY)
4342 GEN_EXCP_PRIVOPC(ctx);
4343 #else
4344 if (unlikely(!ctx->supervisor)) {
4345 GEN_EXCP_PRIVOPC(ctx);
4346 return;
4348 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4349 gen_op_6xx_tlbli();
4350 #endif
4353 /* 74xx TLB management */
4354 /* tlbld */
4355 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB)
4357 #if defined(CONFIG_USER_ONLY)
4358 GEN_EXCP_PRIVOPC(ctx);
4359 #else
4360 if (unlikely(!ctx->supervisor)) {
4361 GEN_EXCP_PRIVOPC(ctx);
4362 return;
4364 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4365 gen_op_74xx_tlbld();
4366 #endif
4369 /* tlbli */
4370 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB)
4372 #if defined(CONFIG_USER_ONLY)
4373 GEN_EXCP_PRIVOPC(ctx);
4374 #else
4375 if (unlikely(!ctx->supervisor)) {
4376 GEN_EXCP_PRIVOPC(ctx);
4377 return;
4379 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4380 gen_op_74xx_tlbli();
4381 #endif
4384 /* POWER instructions not in PowerPC 601 */
4385 /* clf */
4386 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER)
4388 /* Cache line flush: implemented as no-op */
4391 /* cli */
4392 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER)
4394 /* Cache line invalidate: privileged and treated as no-op */
4395 #if defined(CONFIG_USER_ONLY)
4396 GEN_EXCP_PRIVOPC(ctx);
4397 #else
4398 if (unlikely(!ctx->supervisor)) {
4399 GEN_EXCP_PRIVOPC(ctx);
4400 return;
4402 #endif
4405 /* dclst */
4406 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER)
4408 /* Data cache line store: treated as no-op */
4411 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER)
4413 #if defined(CONFIG_USER_ONLY)
4414 GEN_EXCP_PRIVOPC(ctx);
4415 #else
4416 if (unlikely(!ctx->supervisor)) {
4417 GEN_EXCP_PRIVOPC(ctx);
4418 return;
4420 int ra = rA(ctx->opcode);
4421 int rd = rD(ctx->opcode);
4423 gen_addr_reg_index(ctx);
4424 gen_op_POWER_mfsri();
4425 tcg_gen_mov_tl(cpu_gpr[rd], cpu_T[0]);
4426 if (ra != 0 && ra != rd)
4427 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[1]);
4428 #endif
4431 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER)
4433 #if defined(CONFIG_USER_ONLY)
4434 GEN_EXCP_PRIVOPC(ctx);
4435 #else
4436 if (unlikely(!ctx->supervisor)) {
4437 GEN_EXCP_PRIVOPC(ctx);
4438 return;
4440 gen_addr_reg_index(ctx);
4441 gen_op_POWER_rac();
4442 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4443 #endif
4446 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER)
4448 #if defined(CONFIG_USER_ONLY)
4449 GEN_EXCP_PRIVOPC(ctx);
4450 #else
4451 if (unlikely(!ctx->supervisor)) {
4452 GEN_EXCP_PRIVOPC(ctx);
4453 return;
4455 gen_op_POWER_rfsvc();
4456 GEN_SYNC(ctx);
4457 #endif
4460 /* svc is not implemented for now */
4462 /* POWER2 specific instructions */
4463 /* Quad manipulation (load/store two floats at a time) */
4464 /* Original POWER2 is 32 bits only; define the 64-bit ops as the 32-bit ones */
4465 #define op_POWER2_lfq() (*gen_op_POWER2_lfq[ctx->mem_idx])()
4466 #define op_POWER2_stfq() (*gen_op_POWER2_stfq[ctx->mem_idx])()
4467 #define gen_op_POWER2_lfq_64_raw gen_op_POWER2_lfq_raw
4468 #define gen_op_POWER2_lfq_64_user gen_op_POWER2_lfq_user
4469 #define gen_op_POWER2_lfq_64_kernel gen_op_POWER2_lfq_kernel
4470 #define gen_op_POWER2_lfq_64_hypv gen_op_POWER2_lfq_hypv
4471 #define gen_op_POWER2_lfq_le_64_raw gen_op_POWER2_lfq_le_raw
4472 #define gen_op_POWER2_lfq_le_64_user gen_op_POWER2_lfq_le_user
4473 #define gen_op_POWER2_lfq_le_64_kernel gen_op_POWER2_lfq_le_kernel
4474 #define gen_op_POWER2_lfq_le_64_hypv gen_op_POWER2_lfq_le_hypv
4475 #define gen_op_POWER2_stfq_64_raw gen_op_POWER2_stfq_raw
4476 #define gen_op_POWER2_stfq_64_user gen_op_POWER2_stfq_user
4477 #define gen_op_POWER2_stfq_64_kernel gen_op_POWER2_stfq_kernel
4478 #define gen_op_POWER2_stfq_64_hypv gen_op_POWER2_stfq_hypv
4479 #define gen_op_POWER2_stfq_le_64_raw gen_op_POWER2_stfq_le_raw
4480 #define gen_op_POWER2_stfq_le_64_user gen_op_POWER2_stfq_le_user
4481 #define gen_op_POWER2_stfq_le_64_kernel gen_op_POWER2_stfq_le_kernel
4482 #define gen_op_POWER2_stfq_le_64_hypv gen_op_POWER2_stfq_le_hypv
4483 static GenOpFunc *gen_op_POWER2_lfq[NB_MEM_FUNCS] = {
4484 GEN_MEM_FUNCS(POWER2_lfq),
4486 static GenOpFunc *gen_op_POWER2_stfq[NB_MEM_FUNCS] = {
4487 GEN_MEM_FUNCS(POWER2_stfq),
4490 /* lfq */
4491 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4493 /* NIP cannot be restored if the memory exception comes from a helper */
4494 gen_update_nip(ctx, ctx->nip - 4);
4495 gen_addr_imm_index(ctx, 0);
4496 op_POWER2_lfq();
4497 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4498 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4501 /* lfqu */
4502 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4504 int ra = rA(ctx->opcode);
4506 /* NIP cannot be restored if the memory exception comes from a helper */
4507 gen_update_nip(ctx, ctx->nip - 4);
4508 gen_addr_imm_index(ctx, 0);
4509 op_POWER2_lfq();
4510 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4511 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4512 if (ra != 0)
4513 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4516 /* lfqux */
4517 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2)
4519 int ra = rA(ctx->opcode);
4521 /* NIP cannot be restored if the memory exception comes from a helper */
4522 gen_update_nip(ctx, ctx->nip - 4);
4523 gen_addr_reg_index(ctx);
4524 op_POWER2_lfq();
4525 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4526 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4527 if (ra != 0)
4528 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4531 /* lfqx */
4532 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2)
4534 /* NIP cannot be restored if the memory exception comes from a helper */
4535 gen_update_nip(ctx, ctx->nip - 4);
4536 gen_addr_reg_index(ctx);
4537 op_POWER2_lfq();
4538 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4539 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4542 /* stfq */
4543 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4545 /* NIP cannot be restored if the memory exception comes from a helper */
4546 gen_update_nip(ctx, ctx->nip - 4);
4547 gen_addr_imm_index(ctx, 0);
4548 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4549 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4550 op_POWER2_stfq();
4553 /* stfqu */
4554 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4556 int ra = rA(ctx->opcode);
4558 /* NIP cannot be restored if the memory exception comes from a helper */
4559 gen_update_nip(ctx, ctx->nip - 4);
4560 gen_addr_imm_index(ctx, 0);
4561 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4562 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4563 op_POWER2_stfq();
4564 if (ra != 0)
4565 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4568 /* stfqux */
4569 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2)
4571 int ra = rA(ctx->opcode);
4573 /* NIP cannot be restored if the memory exception comes from a helper */
4574 gen_update_nip(ctx, ctx->nip - 4);
4575 gen_addr_reg_index(ctx);
4576 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4577 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4578 op_POWER2_stfq();
4579 if (ra != 0)
4580 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4583 /* stfqx */
4584 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2)
4586 /* NIP cannot be restored if the memory exception comes from a helper */
4587 gen_update_nip(ctx, ctx->nip - 4);
4588 gen_addr_reg_index(ctx);
4589 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4590 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4591 op_POWER2_stfq();
4594 /* BookE specific instructions */
4595 /* XXX: not implemented on 440 ? */
4596 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI)
4598 /* XXX: TODO */
4599 GEN_EXCP_INVAL(ctx);
4602 /* XXX: not implemented on 440 ? */
4603 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA)
4605 #if defined(CONFIG_USER_ONLY)
4606 GEN_EXCP_PRIVOPC(ctx);
4607 #else
4608 if (unlikely(!ctx->supervisor)) {
4609 GEN_EXCP_PRIVOPC(ctx);
4610 return;
4612 gen_addr_reg_index(ctx);
4613 /* Use the same micro-ops as for tlbie */
4614 #if defined(TARGET_PPC64)
4615 if (ctx->sf_mode)
4616 gen_op_tlbie_64();
4617 else
4618 #endif
4619 gen_op_tlbie();
4620 #endif
4623 /* All 405 MAC instructions are translated here */
4624 static always_inline void gen_405_mulladd_insn (DisasContext *ctx,
4625 int opc2, int opc3,
4626 int ra, int rb, int rt, int Rc)
4628 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[ra]);
4629 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rb]);
4630 switch (opc3 & 0x0D) {
4631 case 0x05:
4632 /* macchw - macchw. - macchwo - macchwo. */
4633 /* macchws - macchws. - macchwso - macchwso. */
4634 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
4635 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
4636 /* mulchw - mulchw. */
4637 gen_op_405_mulchw();
4638 break;
4639 case 0x04:
4640 /* macchwu - macchwu. - macchwuo - macchwuo. */
4641 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
4642 /* mulchwu - mulchwu. */
4643 gen_op_405_mulchwu();
4644 break;
4645 case 0x01:
4646 /* machhw - machhw. - machhwo - machhwo. */
4647 /* machhws - machhws. - machhwso - machhwso. */
4648 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
4649 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
4650 /* mulhhw - mulhhw. */
4651 gen_op_405_mulhhw();
4652 break;
4653 case 0x00:
4654 /* machhwu - machhwu. - machhwuo - machhwuo. */
4655 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
4656 /* mulhhwu - mulhhwu. */
4657 gen_op_405_mulhhwu();
4658 break;
4659 case 0x0D:
4660 /* maclhw - maclhw. - maclhwo - maclhwo. */
4661 /* maclhws - maclhws. - maclhwso - maclhwso. */
4662 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
4663 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
4664 /* mullhw - mullhw. */
4665 gen_op_405_mullhw();
4666 break;
4667 case 0x0C:
4668 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
4669 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
4670 /* mullhwu - mullhwu. */
4671 gen_op_405_mullhwu();
4672 break;
4674 if (opc2 & 0x02) {
4675 /* nmultiply-and-accumulate (0x0E) */
4676 gen_op_neg();
4678 if (opc2 & 0x04) {
4679 /* (n)multiply-and-accumulate (0x0C - 0x0E) */
4680 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rt]);
4681 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4682 gen_op_405_add_T0_T2();
4684 if (opc3 & 0x10) {
4685 /* Check overflow */
4686 if (opc3 & 0x01)
4687 gen_op_check_addo();
4688 else
4689 gen_op_405_check_ovu();
4691 if (opc3 & 0x02) {
4692 /* Saturate */
4693 if (opc3 & 0x01)
4694 gen_op_405_check_sat();
4695 else
4696 gen_op_405_check_satu();
4698 tcg_gen_mov_tl(cpu_gpr[rt], cpu_T[0]);
4699 if (unlikely(Rc != 0)) {
4700 /* Update Rc0 */
4701 gen_set_Rc0(ctx);
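/* Decode summary, derived from the switch and flag tests above: within the
 * opc3 & 0x0D switch, bits 0x0C pick the half-word combination (0x04 cross,
 * 0x00 high, 0x0C low) and bit 0x01 signed vs. unsigned; opc3 & 0x10 adds an
 * overflow check and opc3 & 0x02 saturation; opc2 & 0x02 negates the product
 * and opc2 & 0x04 accumulates it into rt.
 * Worked example: macchwso (opc2 0x0C, opc3 0x17) multiplies the cross
 * half-words signed, accumulates, checks overflow and saturates.
 */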
4705 #define GEN_MAC_HANDLER(name, opc2, opc3) \
4706 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) \
4708 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
4709 rD(ctx->opcode), Rc(ctx->opcode)); \
4712 /* macchw - macchw. */
4713 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
4714 /* macchwo - macchwo. */
4715 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
4716 /* macchws - macchws. */
4717 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
4718 /* macchwso - macchwso. */
4719 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
4720 /* macchwsu - macchwsu. */
4721 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
4722 /* macchwsuo - macchwsuo. */
4723 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
4724 /* macchwu - macchwu. */
4725 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
4726 /* macchwuo - macchwuo. */
4727 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
4728 /* machhw - machhw. */
4729 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
4730 /* machhwo - machhwo. */
4731 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
4732 /* machhws - machhws. */
4733 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
4734 /* machhwso - machhwso. */
4735 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
4736 /* machhwsu - machhwsu. */
4737 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
4738 /* machhwsuo - machhwsuo. */
4739 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
4740 /* machhwu - machhwu. */
4741 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
4742 /* machhwuo - machhwuo. */
4743 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
4744 /* maclhw - maclhw. */
4745 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
4746 /* maclhwo - maclhwo. */
4747 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
4748 /* maclhws - maclhws. */
4749 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
4750 /* maclhwso - maclhwso. */
4751 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
4752 /* maclhwu - maclhwu. */
4753 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
4754 /* maclhwuo - maclhwuo. */
4755 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
4756 /* maclhwsu - maclhwsu. */
4757 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
4758 /* maclhwsuo - maclhwsuo. */
4759 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
4760 /* nmacchw - nmacchw. */
4761 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
4762 /* nmacchwo - nmacchwo. */
4763 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
4764 /* nmacchws - nmacchws. */
4765 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
4766 /* nmacchwso - nmacchwso. */
4767 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
4768 /* nmachhw - nmachhw. */
4769 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
4770 /* nmachhwo - nmachhwo. */
4771 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
4772 /* nmachhws - nmachhws. */
4773 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
4774 /* nmachhwso - nmachhwso. */
4775 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
4776 /* nmaclhw - nmaclhw. */
4777 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
4778 /* nmaclhwo - nmaclhwo. */
4779 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
4780 /* nmaclhws - nmaclhws. */
4781 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
4782 /* nmaclhwso - nmaclhwso. */
4783 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
4785 /* mulchw - mulchw. */
4786 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
4787 /* mulchwu - mulchwu. */
4788 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
4789 /* mulhhw - mulhhw. */
4790 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
4791 /* mulhhwu - mulhhwu. */
4792 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
4793 /* mullhw - mullhw. */
4794 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
4795 /* mullhwu - mullhwu. */
4796 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
4798 /* mfdcr */
4799 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR)
4801 #if defined(CONFIG_USER_ONLY)
4802 GEN_EXCP_PRIVREG(ctx);
4803 #else
4804 uint32_t dcrn = SPR(ctx->opcode);
4806 if (unlikely(!ctx->supervisor)) {
4807 GEN_EXCP_PRIVREG(ctx);
4808 return;
4810 tcg_gen_movi_tl(cpu_T[0], dcrn);
4811 gen_op_load_dcr();
4812 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4813 #endif
4816 /* mtdcr */
4817 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR)
4819 #if defined(CONFIG_USER_ONLY)
4820 GEN_EXCP_PRIVREG(ctx);
4821 #else
4822 uint32_t dcrn = SPR(ctx->opcode);
4824 if (unlikely(!ctx->supervisor)) {
4825 GEN_EXCP_PRIVREG(ctx);
4826 return;
4828 tcg_gen_movi_tl(cpu_T[0], dcrn);
4829 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4830 gen_op_store_dcr();
4831 #endif
4834 /* mfdcrx */
4835 /* XXX: not implemented on 440 ? */
4836 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX)
4838 #if defined(CONFIG_USER_ONLY)
4839 GEN_EXCP_PRIVREG(ctx);
4840 #else
4841 if (unlikely(!ctx->supervisor)) {
4842 GEN_EXCP_PRIVREG(ctx);
4843 return;
4845 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4846 gen_op_load_dcr();
4847 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4848 /* Note: setting the Rc bit leaves CR0 (Rc0) in an undefined state */
4849 #endif
4852 /* mtdcrx */
4853 /* XXX: not implemented on 440 ? */
4854 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX)
4856 #if defined(CONFIG_USER_ONLY)
4857 GEN_EXCP_PRIVREG(ctx);
4858 #else
4859 if (unlikely(!ctx->supervisor)) {
4860 GEN_EXCP_PRIVREG(ctx);
4861 return;
4863 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4864 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4865 gen_op_store_dcr();
4866 /* Note: setting the Rc bit leaves CR0 (Rc0) in an undefined state */
4867 #endif
4870 /* mfdcrux (PPC 460) : user-mode access to DCR */
4871 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX)
4873 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4874 gen_op_load_dcr();
4875 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4876 /* Note: setting the Rc bit leaves CR0 (Rc0) in an undefined state */
4879 /* mtdcrux (PPC 460) : user-mode access to DCR */
4880 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX)
4882 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4883 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4884 gen_op_store_dcr();
4885 /* Note: setting the Rc bit leaves CR0 (Rc0) in an undefined state */
4888 /* dccci */
4889 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON)
4891 #if defined(CONFIG_USER_ONLY)
4892 GEN_EXCP_PRIVOPC(ctx);
4893 #else
4894 if (unlikely(!ctx->supervisor)) {
4895 GEN_EXCP_PRIVOPC(ctx);
4896 return;
4898 /* interpreted as no-op */
4899 #endif
4902 /* dcread */
4903 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON)
4905 #if defined(CONFIG_USER_ONLY)
4906 GEN_EXCP_PRIVOPC(ctx);
4907 #else
4908 if (unlikely(!ctx->supervisor)) {
4909 GEN_EXCP_PRIVOPC(ctx);
4910 return;
4912 gen_addr_reg_index(ctx);
4913 op_ldst(lwz);
4914 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4915 #endif
4918 /* icbt */
4919 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT)
4921 /* interpreted as no-op */
4922 /* XXX: the specification says this is treated as a load by the MMU
4923 * but does not generate any exception
4927 /* iccci */
4928 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON)
4930 #if defined(CONFIG_USER_ONLY)
4931 GEN_EXCP_PRIVOPC(ctx);
4932 #else
4933 if (unlikely(!ctx->supervisor)) {
4934 GEN_EXCP_PRIVOPC(ctx);
4935 return;
4937 /* interpreted as no-op */
4938 #endif
4941 /* icread */
4942 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON)
4944 #if defined(CONFIG_USER_ONLY)
4945 GEN_EXCP_PRIVOPC(ctx);
4946 #else
4947 if (unlikely(!ctx->supervisor)) {
4948 GEN_EXCP_PRIVOPC(ctx);
4949 return;
4951 /* interpreted as no-op */
4952 #endif
4955 /* rfci (supervisor only) */
4956 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP)
4958 #if defined(CONFIG_USER_ONLY)
4959 GEN_EXCP_PRIVOPC(ctx);
4960 #else
4961 if (unlikely(!ctx->supervisor)) {
4962 GEN_EXCP_PRIVOPC(ctx);
4963 return;
4965 /* Restore CPU state */
4966 gen_op_40x_rfci();
4967 GEN_SYNC(ctx);
4968 #endif
4971 GEN_HANDLER(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE)
4973 #if defined(CONFIG_USER_ONLY)
4974 GEN_EXCP_PRIVOPC(ctx);
4975 #else
4976 if (unlikely(!ctx->supervisor)) {
4977 GEN_EXCP_PRIVOPC(ctx);
4978 return;
4980 /* Restore CPU state */
4981 gen_op_rfci();
4982 GEN_SYNC(ctx);
4983 #endif
4986 /* BookE specific */
4987 /* XXX: not implemented on 440 ? */
4988 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI)
4990 #if defined(CONFIG_USER_ONLY)
4991 GEN_EXCP_PRIVOPC(ctx);
4992 #else
4993 if (unlikely(!ctx->supervisor)) {
4994 GEN_EXCP_PRIVOPC(ctx);
4995 return;
4997 /* Restore CPU state */
4998 gen_op_rfdi();
4999 GEN_SYNC(ctx);
5000 #endif
5003 /* XXX: not implemented on 440 ? */
5004 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI)
5006 #if defined(CONFIG_USER_ONLY)
5007 GEN_EXCP_PRIVOPC(ctx);
5008 #else
5009 if (unlikely(!ctx->supervisor)) {
5010 GEN_EXCP_PRIVOPC(ctx);
5011 return;
5013 /* Restore CPU state */
5014 gen_op_rfmci();
5015 GEN_SYNC(ctx);
5016 #endif
5019 /* TLB management - PowerPC 405 implementation */
5020 /* tlbre */
5021 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB)
5023 #if defined(CONFIG_USER_ONLY)
5024 GEN_EXCP_PRIVOPC(ctx);
5025 #else
5026 if (unlikely(!ctx->supervisor)) {
5027 GEN_EXCP_PRIVOPC(ctx);
5028 return;
5030 switch (rB(ctx->opcode)) {
5031 case 0:
5032 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5033 gen_op_4xx_tlbre_hi();
5034 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5035 break;
5036 case 1:
5037 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5038 gen_op_4xx_tlbre_lo();
5039 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5040 break;
5041 default:
5042 GEN_EXCP_INVAL(ctx);
5043 break;
5045 #endif
5048 /* tlbsx - tlbsx. */
5049 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB)
5051 #if defined(CONFIG_USER_ONLY)
5052 GEN_EXCP_PRIVOPC(ctx);
5053 #else
5054 if (unlikely(!ctx->supervisor)) {
5055 GEN_EXCP_PRIVOPC(ctx);
5056 return;
5058 gen_addr_reg_index(ctx);
5059 gen_op_4xx_tlbsx();
5060 if (Rc(ctx->opcode))
5061 gen_op_4xx_tlbsx_check();
5062 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5063 #endif
5066 /* tlbwe */
5067 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB)
5069 #if defined(CONFIG_USER_ONLY)
5070 GEN_EXCP_PRIVOPC(ctx);
5071 #else
5072 if (unlikely(!ctx->supervisor)) {
5073 GEN_EXCP_PRIVOPC(ctx);
5074 return;
5076 switch (rB(ctx->opcode)) {
5077 case 0:
5078 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5079 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5080 gen_op_4xx_tlbwe_hi();
5081 break;
5082 case 1:
5083 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5084 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5085 gen_op_4xx_tlbwe_lo();
5086 break;
5087 default:
5088 GEN_EXCP_INVAL(ctx);
5089 break;
5091 #endif
5094 /* TLB management - PowerPC 440 implementation */
5095 /* tlbre */
5096 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE)
5098 #if defined(CONFIG_USER_ONLY)
5099 GEN_EXCP_PRIVOPC(ctx);
5100 #else
5101 if (unlikely(!ctx->supervisor)) {
5102 GEN_EXCP_PRIVOPC(ctx);
5103 return;
5105 switch (rB(ctx->opcode)) {
5106 case 0:
5107 case 1:
5108 case 2:
5109 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5110 gen_op_440_tlbre(rB(ctx->opcode));
5111 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5112 break;
5113 default:
5114 GEN_EXCP_INVAL(ctx);
5115 break;
5117 #endif
5120 /* tlbsx - tlbsx. */
5121 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE)
5123 #if defined(CONFIG_USER_ONLY)
5124 GEN_EXCP_PRIVOPC(ctx);
5125 #else
5126 if (unlikely(!ctx->supervisor)) {
5127 GEN_EXCP_PRIVOPC(ctx);
5128 return;
5130 gen_addr_reg_index(ctx);
5131 gen_op_440_tlbsx();
5132 if (Rc(ctx->opcode))
5133 gen_op_4xx_tlbsx_check();
5134 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5135 #endif
5138 /* tlbwe */
5139 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE)
5141 #if defined(CONFIG_USER_ONLY)
5142 GEN_EXCP_PRIVOPC(ctx);
5143 #else
5144 if (unlikely(!ctx->supervisor)) {
5145 GEN_EXCP_PRIVOPC(ctx);
5146 return;
5148 switch (rB(ctx->opcode)) {
5149 case 0:
5150 case 1:
5151 case 2:
5152 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5153 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5154 gen_op_440_tlbwe(rB(ctx->opcode));
5155 break;
5156 default:
5157 GEN_EXCP_INVAL(ctx);
5158 break;
5160 #endif
5163 /* wrtee */
5164 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE)
5166 #if defined(CONFIG_USER_ONLY)
5167 GEN_EXCP_PRIVOPC(ctx);
5168 #else
5169 if (unlikely(!ctx->supervisor)) {
5170 GEN_EXCP_PRIVOPC(ctx);
5171 return;
5173 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rD(ctx->opcode)]);
5174 gen_op_wrte();
5175 /* Stop translation to have a chance to raise an exception
5176 * if we just set msr_ee to 1
5178 GEN_STOP(ctx);
5179 #endif
5182 /* wrteei */
5183 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000EFC01, PPC_WRTEE)
5185 #if defined(CONFIG_USER_ONLY)
5186 GEN_EXCP_PRIVOPC(ctx);
5187 #else
5188 if (unlikely(!ctx->supervisor)) {
5189 GEN_EXCP_PRIVOPC(ctx);
5190 return;
5192 tcg_gen_movi_tl(cpu_T[0], ctx->opcode & 0x00010000);
5193 gen_op_wrte();
5194 /* Stop translation to have a chance to raise an exception
5195 * if we just set msr_ee to 1
5197 GEN_STOP(ctx);
5198 #endif
5201 /* PowerPC 440 specific instructions */
5202 /* dlmzb */
5203 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC)
5205 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
5206 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
5207 gen_op_440_dlmzb();
5208 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
5209 gen_op_store_xer_bc();
5210 if (Rc(ctx->opcode)) {
5211 gen_op_440_dlmzb_update_Rc();
5212 tcg_gen_andi_i32(cpu_crf[0], cpu_T[0], 0xf);
5216 /* mbar replaces eieio on 440 */
5217 GEN_HANDLER(mbar, 0x1F, 0x16, 0x13, 0x001FF801, PPC_BOOKE)
5219 /* interpreted as no-op */
5222 /* msync replaces sync on 440 */
5223 GEN_HANDLER(msync, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE)
5225 /* interpreted as no-op */
5228 /* icbt */
5229 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, PPC_BOOKE)
5231 /* interpreted as no-op */
5232 /* XXX: the specification says this is treated as a load by the MMU
5233 * but does not generate any exception
5237 /*** Altivec vector extension ***/
5238 /* Altivec registers moves */
5240 static always_inline void gen_load_avr(int t, int reg) {
5241 tcg_gen_mov_i64(cpu_AVRh[t], cpu_avrh[reg]);
5242 tcg_gen_mov_i64(cpu_AVRl[t], cpu_avrl[reg]);
5245 static always_inline void gen_store_avr(int reg, int t) {
5246 tcg_gen_mov_i64(cpu_avrh[reg], cpu_AVRh[t]);
5247 tcg_gen_mov_i64(cpu_avrl[reg], cpu_AVRl[t]);
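/* An Altivec register is modelled as two 64-bit halves (cpu_avrh / cpu_avrl);
 * these helpers copy a full 128-bit VR between the register file and the
 * AVRh/AVRl scratch pair used by the vector micro-ops.
 */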
5250 #define op_vr_ldst(name) (*gen_op_##name[ctx->mem_idx])()
5251 #define OP_VR_LD_TABLE(name) \
5252 static GenOpFunc *gen_op_vr_l##name[NB_MEM_FUNCS] = { \
5253 GEN_MEM_FUNCS(vr_l##name), \
5255 #define OP_VR_ST_TABLE(name) \
5256 static GenOpFunc *gen_op_vr_st##name[NB_MEM_FUNCS] = { \
5257 GEN_MEM_FUNCS(vr_st##name), \
5260 #define GEN_VR_LDX(name, opc2, opc3) \
5261 GEN_HANDLER(l##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
5263 if (unlikely(!ctx->altivec_enabled)) { \
5264 GEN_EXCP_NO_VR(ctx); \
5265 return; \
5267 gen_addr_reg_index(ctx); \
5268 op_vr_ldst(vr_l##name); \
5269 gen_store_avr(rD(ctx->opcode), 0); \
5272 #define GEN_VR_STX(name, opc2, opc3) \
5273 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
5275 if (unlikely(!ctx->altivec_enabled)) { \
5276 GEN_EXCP_NO_VR(ctx); \
5277 return; \
5279 gen_addr_reg_index(ctx); \
5280 gen_load_avr(0, rS(ctx->opcode)); \
5281 op_vr_ldst(vr_st##name); \
5284 OP_VR_LD_TABLE(vx);
5285 GEN_VR_LDX(vx, 0x07, 0x03);
5286 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
5287 #define gen_op_vr_lvxl gen_op_vr_lvx
5288 GEN_VR_LDX(vxl, 0x07, 0x0B);
5290 OP_VR_ST_TABLE(vx);
5291 GEN_VR_STX(vx, 0x07, 0x07);
5292 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
5293 #define gen_op_vr_stvxl gen_op_vr_stvx
5294 GEN_VR_STX(vxl, 0x07, 0x0F);
5296 /*** SPE extension ***/
5297 /* Register moves */
5299 static always_inline void gen_load_gpr64(TCGv t, int reg) {
5300 #if defined(TARGET_PPC64)
5301 tcg_gen_mov_i64(t, cpu_gpr[reg]);
5302 #else
5303 tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
5304 #endif
5307 static always_inline void gen_store_gpr64(int reg, TCGv t) {
5308 #if defined(TARGET_PPC64)
5309 tcg_gen_mov_i64(cpu_gpr[reg], t);
5310 #else
5311 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
5312 TCGv tmp = tcg_temp_local_new(TCG_TYPE_I64);
5313 tcg_gen_shri_i64(tmp, t, 32);
5314 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
5315 tcg_temp_free(tmp);
5316 #endif
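/* On 32-bit targets an SPE 64-bit GPR is split across two 32-bit globals:
 * cpu_gpr[reg] holds the low word and cpu_gprh[reg] the high word.
 * Illustrative sketch only (hypothetical helper names, not part of the file):
 *   uint64_t spe_pack(uint32_t lo, uint32_t hi) { return ((uint64_t)hi << 32) | lo; }
 *   void spe_split(uint64_t v, uint32_t *lo, uint32_t *hi)
 *   { *lo = (uint32_t)v; *hi = (uint32_t)(v >> 32); }
 * which is what gen_load_gpr64()/gen_store_gpr64() do with TCG ops.
 */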
5319 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
5320 GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type) \
5322 if (Rc(ctx->opcode)) \
5323 gen_##name1(ctx); \
5324 else \
5325 gen_##name0(ctx); \
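/* Each GEN_SPE entry packs two instructions into one opcode slot: the low
 * opcode bit extracted by Rc() (not a record bit for SPE) selects the name1
 * generator when set and the name0 generator when clear.
 */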
5328 /* Handler for undefined SPE opcodes */
5329 static always_inline void gen_speundef (DisasContext *ctx)
5331 GEN_EXCP_INVAL(ctx);
5334 /* SPE load and stores */
5335 static always_inline void gen_addr_spe_imm_index (DisasContext *ctx, int sh)
5337 target_long simm = rB(ctx->opcode);
5339 if (rA(ctx->opcode) == 0) {
5340 tcg_gen_movi_tl(cpu_T[0], simm << sh);
5341 } else {
5342 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5343 if (likely(simm != 0))
5344 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm << sh);
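/* The 5-bit immediate is encoded in the rB field and scaled by the access
 * size: e.g. for evldd (sh == 3 below) an immediate of 4 yields an effective
 * address of rA + 32, or just 32 when rA is 0.
 */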
5348 #define op_spe_ldst(name) (*gen_op_##name[ctx->mem_idx])()
5349 #define OP_SPE_LD_TABLE(name) \
5350 static GenOpFunc *gen_op_spe_l##name[NB_MEM_FUNCS] = { \
5351 GEN_MEM_FUNCS(spe_l##name), \
5353 #define OP_SPE_ST_TABLE(name) \
5354 static GenOpFunc *gen_op_spe_st##name[NB_MEM_FUNCS] = { \
5355 GEN_MEM_FUNCS(spe_st##name), \
5358 #define GEN_SPE_LD(name, sh) \
5359 static always_inline void gen_evl##name (DisasContext *ctx) \
5361 if (unlikely(!ctx->spe_enabled)) { \
5362 GEN_EXCP_NO_AP(ctx); \
5363 return; \
5365 gen_addr_spe_imm_index(ctx, sh); \
5366 op_spe_ldst(spe_l##name); \
5367 gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]); \
5370 #define GEN_SPE_LDX(name) \
5371 static always_inline void gen_evl##name##x (DisasContext *ctx) \
5373 if (unlikely(!ctx->spe_enabled)) { \
5374 GEN_EXCP_NO_AP(ctx); \
5375 return; \
5377 gen_addr_reg_index(ctx); \
5378 op_spe_ldst(spe_l##name); \
5379 gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]); \
5382 #define GEN_SPEOP_LD(name, sh) \
5383 OP_SPE_LD_TABLE(name); \
5384 GEN_SPE_LD(name, sh); \
5385 GEN_SPE_LDX(name)
5387 #define GEN_SPE_ST(name, sh) \
5388 static always_inline void gen_evst##name (DisasContext *ctx) \
5390 if (unlikely(!ctx->spe_enabled)) { \
5391 GEN_EXCP_NO_AP(ctx); \
5392 return; \
5394 gen_addr_spe_imm_index(ctx, sh); \
5395 gen_load_gpr64(cpu_T64[1], rS(ctx->opcode)); \
5396 op_spe_ldst(spe_st##name); \
5399 #define GEN_SPE_STX(name) \
5400 static always_inline void gen_evst##name##x (DisasContext *ctx) \
5402 if (unlikely(!ctx->spe_enabled)) { \
5403 GEN_EXCP_NO_AP(ctx); \
5404 return; \
5406 gen_addr_reg_index(ctx); \
5407 gen_load_gpr64(cpu_T64[1], rS(ctx->opcode)); \
5408 op_spe_ldst(spe_st##name); \
5411 #define GEN_SPEOP_ST(name, sh) \
5412 OP_SPE_ST_TABLE(name); \
5413 GEN_SPE_ST(name, sh); \
5414 GEN_SPE_STX(name)
5416 #define GEN_SPEOP_LDST(name, sh) \
5417 GEN_SPEOP_LD(name, sh); \
5418 GEN_SPEOP_ST(name, sh)
5420 /* SPE arithmetic and logic */
5421 #define GEN_SPEOP_ARITH2(name) \
5422 static always_inline void gen_##name (DisasContext *ctx) \
5424 if (unlikely(!ctx->spe_enabled)) { \
5425 GEN_EXCP_NO_AP(ctx); \
5426 return; \
5428 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5429 gen_load_gpr64(cpu_T64[1], rB(ctx->opcode)); \
5430 gen_op_##name(); \
5431 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5434 #define GEN_SPEOP_ARITH1(name) \
5435 static always_inline void gen_##name (DisasContext *ctx) \
5437 if (unlikely(!ctx->spe_enabled)) { \
5438 GEN_EXCP_NO_AP(ctx); \
5439 return; \
5441 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5442 gen_op_##name(); \
5443 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5446 #define GEN_SPEOP_COMP(name) \
5447 static always_inline void gen_##name (DisasContext *ctx) \
5449 if (unlikely(!ctx->spe_enabled)) { \
5450 GEN_EXCP_NO_AP(ctx); \
5451 return; \
5453 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5454 gen_load_gpr64(cpu_T64[1], rB(ctx->opcode)); \
5455 gen_op_##name(); \
5456 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf); \
5459 /* Logical */
5460 GEN_SPEOP_ARITH2(evand);
5461 GEN_SPEOP_ARITH2(evandc);
5462 GEN_SPEOP_ARITH2(evxor);
5463 GEN_SPEOP_ARITH2(evor);
5464 GEN_SPEOP_ARITH2(evnor);
5465 GEN_SPEOP_ARITH2(eveqv);
5466 GEN_SPEOP_ARITH2(evorc);
5467 GEN_SPEOP_ARITH2(evnand);
5468 GEN_SPEOP_ARITH2(evsrwu);
5469 GEN_SPEOP_ARITH2(evsrws);
5470 GEN_SPEOP_ARITH2(evslw);
5471 GEN_SPEOP_ARITH2(evrlw);
5472 GEN_SPEOP_ARITH2(evmergehi);
5473 GEN_SPEOP_ARITH2(evmergelo);
5474 GEN_SPEOP_ARITH2(evmergehilo);
5475 GEN_SPEOP_ARITH2(evmergelohi);
5477 /* Arithmetic */
5478 GEN_SPEOP_ARITH2(evaddw);
5479 GEN_SPEOP_ARITH2(evsubfw);
5480 GEN_SPEOP_ARITH1(evabs);
5481 GEN_SPEOP_ARITH1(evneg);
5482 GEN_SPEOP_ARITH1(evextsb);
5483 GEN_SPEOP_ARITH1(evextsh);
5484 GEN_SPEOP_ARITH1(evrndw);
5485 GEN_SPEOP_ARITH1(evcntlzw);
5486 GEN_SPEOP_ARITH1(evcntlsw);
5487 static always_inline void gen_brinc (DisasContext *ctx)
5489 /* Note: brinc is usable even if SPE is disabled */
5490 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5491 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
5492 gen_op_brinc();
5493 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5496 #define GEN_SPEOP_ARITH_IMM2(name) \
5497 static always_inline void gen_##name##i (DisasContext *ctx) \
5499 if (unlikely(!ctx->spe_enabled)) { \
5500 GEN_EXCP_NO_AP(ctx); \
5501 return; \
5503 gen_load_gpr64(cpu_T64[0], rB(ctx->opcode)); \
5504 gen_op_splatwi_T1_64(rA(ctx->opcode)); \
5505 gen_op_##name(); \
5506 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5509 #define GEN_SPEOP_LOGIC_IMM2(name) \
5510 static always_inline void gen_##name##i (DisasContext *ctx) \
5512 if (unlikely(!ctx->spe_enabled)) { \
5513 GEN_EXCP_NO_AP(ctx); \
5514 return; \
5516 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5517 gen_op_splatwi_T1_64(rB(ctx->opcode)); \
5518 gen_op_##name(); \
5519 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5522 GEN_SPEOP_ARITH_IMM2(evaddw);
5523 #define gen_evaddiw gen_evaddwi
5524 GEN_SPEOP_ARITH_IMM2(evsubfw);
5525 #define gen_evsubifw gen_evsubfwi
5526 GEN_SPEOP_LOGIC_IMM2(evslw);
5527 GEN_SPEOP_LOGIC_IMM2(evsrwu);
5528 #define gen_evsrwis gen_evsrwsi
5529 GEN_SPEOP_LOGIC_IMM2(evsrws);
5530 #define gen_evsrwiu gen_evsrwui
5531 GEN_SPEOP_LOGIC_IMM2(evrlw);
5533 static always_inline void gen_evsplati (DisasContext *ctx)
5535 int32_t imm = (int32_t)(rA(ctx->opcode) << 27) >> 27;
5537 gen_op_splatwi_T0_64(imm);
5538 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
5541 static always_inline void gen_evsplatfi (DisasContext *ctx)
5543 uint32_t imm = rA(ctx->opcode) << 27;
5545 gen_op_splatwi_T0_64(imm);
5546 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
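/* Both splat forms take their 5-bit immediate from the rA field.  evsplati
 * sign-extends it with the shift pair, e.g. 0x1F << 27 = 0xF8000000 and the
 * arithmetic >> 27 gives -1; evsplatfi instead keeps the bits in the top of
 * the word (imm << 27).  The splat micro-op then appears to copy that word
 * into both halves of the 64-bit result.
 */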
5549 /* Comparison */
5550 GEN_SPEOP_COMP(evcmpgtu);
5551 GEN_SPEOP_COMP(evcmpgts);
5552 GEN_SPEOP_COMP(evcmpltu);
5553 GEN_SPEOP_COMP(evcmplts);
5554 GEN_SPEOP_COMP(evcmpeq);
5556 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
5557 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
5558 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
5559 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
5560 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
5561 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
5562 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
5563 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
5564 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
5565 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
5566 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
5567 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
5568 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
5569 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
5570 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
5571 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
5572 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
5573 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
5574 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
5575 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
5576 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
5577 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
5578 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
5579 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
5580 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
5582 static always_inline void gen_evsel (DisasContext *ctx)
5584 if (unlikely(!ctx->spe_enabled)) {
5585 GEN_EXCP_NO_AP(ctx);
5586 return;
5588 tcg_gen_mov_i32(cpu_T[0], cpu_crf[ctx->opcode & 0x7]);
5589 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode));
5590 gen_load_gpr64(cpu_T64[1], rB(ctx->opcode));
5591 gen_op_evsel();
5592 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
5595 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE)
5597 gen_evsel(ctx);
5599 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE)
5601 gen_evsel(ctx);
5603 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE)
5605 gen_evsel(ctx);
5607 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE)
5609 gen_evsel(ctx);
5612 /* Load and stores */
5613 #if defined(TARGET_PPC64)
5614 /* In that case, we already have 64-bit loads & stores,
5615 * so spe_ldd is equivalent to ld and spe_std is equivalent to std
5617 #define gen_op_spe_ldd_raw gen_op_ld_raw
5618 #define gen_op_spe_ldd_user gen_op_ld_user
5619 #define gen_op_spe_ldd_kernel gen_op_ld_kernel
5620 #define gen_op_spe_ldd_hypv gen_op_ld_hypv
5621 #define gen_op_spe_ldd_64_raw gen_op_ld_64_raw
5622 #define gen_op_spe_ldd_64_user gen_op_ld_64_user
5623 #define gen_op_spe_ldd_64_kernel gen_op_ld_64_kernel
5624 #define gen_op_spe_ldd_64_hypv gen_op_ld_64_hypv
5625 #define gen_op_spe_ldd_le_raw gen_op_ld_le_raw
5626 #define gen_op_spe_ldd_le_user gen_op_ld_le_user
5627 #define gen_op_spe_ldd_le_kernel gen_op_ld_le_kernel
5628 #define gen_op_spe_ldd_le_hypv gen_op_ld_le_hypv
5629 #define gen_op_spe_ldd_le_64_raw gen_op_ld_le_64_raw
5630 #define gen_op_spe_ldd_le_64_user gen_op_ld_le_64_user
5631 #define gen_op_spe_ldd_le_64_kernel gen_op_ld_le_64_kernel
5632 #define gen_op_spe_ldd_le_64_hypv gen_op_ld_le_64_hypv
5633 #define gen_op_spe_stdd_raw gen_op_std_raw
5634 #define gen_op_spe_stdd_user gen_op_std_user
5635 #define gen_op_spe_stdd_kernel gen_op_std_kernel
5636 #define gen_op_spe_stdd_hypv gen_op_std_hypv
5637 #define gen_op_spe_stdd_64_raw gen_op_std_64_raw
5638 #define gen_op_spe_stdd_64_user gen_op_std_64_user
5639 #define gen_op_spe_stdd_64_kernel gen_op_std_64_kernel
5640 #define gen_op_spe_stdd_64_hypv gen_op_std_64_hypv
5641 #define gen_op_spe_stdd_le_raw gen_op_std_le_raw
5642 #define gen_op_spe_stdd_le_user gen_op_std_le_user
5643 #define gen_op_spe_stdd_le_kernel gen_op_std_le_kernel
5644 #define gen_op_spe_stdd_le_hypv gen_op_std_le_hypv
5645 #define gen_op_spe_stdd_le_64_raw gen_op_std_le_64_raw
5646 #define gen_op_spe_stdd_le_64_user gen_op_std_le_64_user
5647 #define gen_op_spe_stdd_le_64_kernel gen_op_std_le_64_kernel
5648 #define gen_op_spe_stdd_le_64_hypv gen_op_std_le_64_hypv
5649 #endif /* defined(TARGET_PPC64) */
5650 GEN_SPEOP_LDST(dd, 3);
5651 GEN_SPEOP_LDST(dw, 3);
5652 GEN_SPEOP_LDST(dh, 3);
5653 GEN_SPEOP_LDST(whe, 2);
5654 GEN_SPEOP_LD(whou, 2);
5655 GEN_SPEOP_LD(whos, 2);
5656 GEN_SPEOP_ST(who, 2);
5658 #if defined(TARGET_PPC64)
5659 /* In that case, spe_stwwo is equivalent to stw */
5660 #define gen_op_spe_stwwo_raw gen_op_stw_raw
5661 #define gen_op_spe_stwwo_user gen_op_stw_user
5662 #define gen_op_spe_stwwo_kernel gen_op_stw_kernel
5663 #define gen_op_spe_stwwo_hypv gen_op_stw_hypv
5664 #define gen_op_spe_stwwo_le_raw gen_op_stw_le_raw
5665 #define gen_op_spe_stwwo_le_user gen_op_stw_le_user
5666 #define gen_op_spe_stwwo_le_kernel gen_op_stw_le_kernel
5667 #define gen_op_spe_stwwo_le_hypv gen_op_stw_le_hypv
5668 #define gen_op_spe_stwwo_64_raw gen_op_stw_64_raw
5669 #define gen_op_spe_stwwo_64_user gen_op_stw_64_user
5670 #define gen_op_spe_stwwo_64_kernel gen_op_stw_64_kernel
5671 #define gen_op_spe_stwwo_64_hypv gen_op_stw_64_hypv
5672 #define gen_op_spe_stwwo_le_64_raw gen_op_stw_le_64_raw
5673 #define gen_op_spe_stwwo_le_64_user gen_op_stw_le_64_user
5674 #define gen_op_spe_stwwo_le_64_kernel gen_op_stw_le_64_kernel
5675 #define gen_op_spe_stwwo_le_64_hypv gen_op_stw_le_64_hypv
5676 #endif
5677 #define _GEN_OP_SPE_STWWE(suffix) \
5678 static always_inline void gen_op_spe_stwwe_##suffix (void) \
5680 gen_op_srli32_T1_64(); \
5681 gen_op_spe_stwwo_##suffix(); \
5683 #define _GEN_OP_SPE_STWWE_LE(suffix) \
5684 static always_inline void gen_op_spe_stwwe_le_##suffix (void) \
5686 gen_op_srli32_T1_64(); \
5687 gen_op_spe_stwwo_le_##suffix(); \
5689 #if defined(TARGET_PPC64)
5690 #define GEN_OP_SPE_STWWE(suffix) \
5691 _GEN_OP_SPE_STWWE(suffix); \
5692 _GEN_OP_SPE_STWWE_LE(suffix); \
5693 static always_inline void gen_op_spe_stwwe_64_##suffix (void) \
5695 gen_op_srli32_T1_64(); \
5696 gen_op_spe_stwwo_64_##suffix(); \
5698 static always_inline void gen_op_spe_stwwe_le_64_##suffix (void) \
5700 gen_op_srli32_T1_64(); \
5701 gen_op_spe_stwwo_le_64_##suffix(); \
5703 #else
5704 #define GEN_OP_SPE_STWWE(suffix) \
5705 _GEN_OP_SPE_STWWE(suffix); \
5706 _GEN_OP_SPE_STWWE_LE(suffix)
5707 #endif
5708 #if defined(CONFIG_USER_ONLY)
5709 GEN_OP_SPE_STWWE(raw);
5710 #else /* defined(CONFIG_USER_ONLY) */
5711 GEN_OP_SPE_STWWE(user);
5712 GEN_OP_SPE_STWWE(kernel);
5713 GEN_OP_SPE_STWWE(hypv);
5714 #endif /* defined(CONFIG_USER_ONLY) */
5715 GEN_SPEOP_ST(wwe, 2);
5716 GEN_SPEOP_ST(wwo, 2);
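/* evstwwe stores what should be the even (upper) word of the source: the
 * helpers above shift the 64-bit temporary right by 32 bits and then reuse
 * the evstwwo (odd/lower word) store micro-op for the actual memory access.
 */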
5718 #define GEN_SPE_LDSPLAT(name, op, suffix) \
5719 static always_inline void gen_op_spe_l##name##_##suffix (void) \
5721 gen_op_##op##_##suffix(); \
5722 gen_op_splatw_T1_64(); \
5725 #define GEN_OP_SPE_LHE(suffix) \
5726 static always_inline void gen_op_spe_lhe_##suffix (void) \
5728 gen_op_spe_lh_##suffix(); \
5729 gen_op_sli16_T1_64(); \
5732 #define GEN_OP_SPE_LHX(suffix) \
5733 static always_inline void gen_op_spe_lhx_##suffix (void) \
5735 gen_op_spe_lh_##suffix(); \
5736 gen_op_extsh_T1_64(); \
5739 #if defined(CONFIG_USER_ONLY)
5740 GEN_OP_SPE_LHE(raw);
5741 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, raw);
5742 GEN_OP_SPE_LHE(le_raw);
5743 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_raw);
5744 GEN_SPE_LDSPLAT(hhousplat, spe_lh, raw);
5745 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_raw);
5746 GEN_OP_SPE_LHX(raw);
5747 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, raw);
5748 GEN_OP_SPE_LHX(le_raw);
5749 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_raw);
5750 #if defined(TARGET_PPC64)
5751 GEN_OP_SPE_LHE(64_raw);
5752 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_raw);
5753 GEN_OP_SPE_LHE(le_64_raw);
5754 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_raw);
5755 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_raw);
5756 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_raw);
5757 GEN_OP_SPE_LHX(64_raw);
5758 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_raw);
5759 GEN_OP_SPE_LHX(le_64_raw);
5760 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_raw);
5761 #endif
5762 #else
5763 GEN_OP_SPE_LHE(user);
5764 GEN_OP_SPE_LHE(kernel);
5765 GEN_OP_SPE_LHE(hypv);
5766 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, user);
5767 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, kernel);
5768 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, hypv);
5769 GEN_OP_SPE_LHE(le_user);
5770 GEN_OP_SPE_LHE(le_kernel);
5771 GEN_OP_SPE_LHE(le_hypv);
5772 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_user);
5773 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_kernel);
5774 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_hypv);
5775 GEN_SPE_LDSPLAT(hhousplat, spe_lh, user);
5776 GEN_SPE_LDSPLAT(hhousplat, spe_lh, kernel);
5777 GEN_SPE_LDSPLAT(hhousplat, spe_lh, hypv);
5778 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_user);
5779 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_kernel);
5780 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_hypv);
5781 GEN_OP_SPE_LHX(user);
5782 GEN_OP_SPE_LHX(kernel);
5783 GEN_OP_SPE_LHX(hypv);
5784 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, user);
5785 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, kernel);
5786 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, hypv);
5787 GEN_OP_SPE_LHX(le_user);
5788 GEN_OP_SPE_LHX(le_kernel);
5789 GEN_OP_SPE_LHX(le_hypv);
5790 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_user);
5791 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_kernel);
5792 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_hypv);
5793 #if defined(TARGET_PPC64)
5794 GEN_OP_SPE_LHE(64_user);
5795 GEN_OP_SPE_LHE(64_kernel);
5796 GEN_OP_SPE_LHE(64_hypv);
5797 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_user);
5798 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_kernel);
5799 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_hypv);
5800 GEN_OP_SPE_LHE(le_64_user);
5801 GEN_OP_SPE_LHE(le_64_kernel);
5802 GEN_OP_SPE_LHE(le_64_hypv);
5803 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_user);
5804 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_kernel);
5805 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_hypv);
5806 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_user);
5807 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_kernel);
5808 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_hypv);
5809 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_user);
5810 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_kernel);
5811 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_hypv);
5812 GEN_OP_SPE_LHX(64_user);
5813 GEN_OP_SPE_LHX(64_kernel);
5814 GEN_OP_SPE_LHX(64_hypv);
5815 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_user);
5816 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_kernel);
5817 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_hypv);
5818 GEN_OP_SPE_LHX(le_64_user);
5819 GEN_OP_SPE_LHX(le_64_kernel);
5820 GEN_OP_SPE_LHX(le_64_hypv);
5821 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_user);
5822 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_kernel);
5823 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_hypv);
5824 #endif
5825 #endif
5826 GEN_SPEOP_LD(hhesplat, 1);
5827 GEN_SPEOP_LD(hhousplat, 1);
5828 GEN_SPEOP_LD(hhossplat, 1);
5829 GEN_SPEOP_LD(wwsplat, 2);
5830 GEN_SPEOP_LD(whsplat, 2);
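/* Half-word splat load flavours, as wired up above: evlhhesplat shifts the
 * loaded half-word into the upper half of each word (sli16), evlhhousplat
 * leaves it zero-extended and evlhhossplat sign-extends it (extsh); in every
 * case splatw then replicates the 32-bit value into both words of rD.
 */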
5832 GEN_SPE(evlddx, evldd, 0x00, 0x0C, 0x00000000, PPC_SPE); //
5833 GEN_SPE(evldwx, evldw, 0x01, 0x0C, 0x00000000, PPC_SPE); //
5834 GEN_SPE(evldhx, evldh, 0x02, 0x0C, 0x00000000, PPC_SPE); //
5835 GEN_SPE(evlhhesplatx, evlhhesplat, 0x04, 0x0C, 0x00000000, PPC_SPE); //
5836 GEN_SPE(evlhhousplatx, evlhhousplat, 0x06, 0x0C, 0x00000000, PPC_SPE); //
5837 GEN_SPE(evlhhossplatx, evlhhossplat, 0x07, 0x0C, 0x00000000, PPC_SPE); //
5838 GEN_SPE(evlwhex, evlwhe, 0x08, 0x0C, 0x00000000, PPC_SPE); //
5839 GEN_SPE(evlwhoux, evlwhou, 0x0A, 0x0C, 0x00000000, PPC_SPE); //
5840 GEN_SPE(evlwhosx, evlwhos, 0x0B, 0x0C, 0x00000000, PPC_SPE); //
5841 GEN_SPE(evlwwsplatx, evlwwsplat, 0x0C, 0x0C, 0x00000000, PPC_SPE); //
5842 GEN_SPE(evlwhsplatx, evlwhsplat, 0x0E, 0x0C, 0x00000000, PPC_SPE); //
5843 GEN_SPE(evstddx, evstdd, 0x10, 0x0C, 0x00000000, PPC_SPE); //
5844 GEN_SPE(evstdwx, evstdw, 0x11, 0x0C, 0x00000000, PPC_SPE); //
5845 GEN_SPE(evstdhx, evstdh, 0x12, 0x0C, 0x00000000, PPC_SPE); //
5846 GEN_SPE(evstwhex, evstwhe, 0x18, 0x0C, 0x00000000, PPC_SPE); //
5847 GEN_SPE(evstwhox, evstwho, 0x1A, 0x0C, 0x00000000, PPC_SPE); //
5848 GEN_SPE(evstwwex, evstwwe, 0x1C, 0x0C, 0x00000000, PPC_SPE); //
5849 GEN_SPE(evstwwox, evstwwo, 0x1E, 0x0C, 0x00000000, PPC_SPE); //
5851 /* Multiply and add - TODO */
5852 #if 0
5853 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
5854 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
5855 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
5856 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
5857 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
5858 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
5859 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
5860 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
5861 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
5862 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
5863 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
5864 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);
5866 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
5867 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
5868 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
5869 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
5870 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
5871 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
5872 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
5873 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
5874 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
5875 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
5876 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
5877 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
5878 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
5879 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);
5881 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
5882 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
5883 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
5884 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
5885 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
5886 GEN_SPE(evmra, speundef, 0x07, 0x13, 0x0000F800, PPC_SPE);
5888 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
5889 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
5890 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
5891 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
5892 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
5893 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
5894 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
5895 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
5896 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
5897 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
5898 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
5899 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);
5901 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
5902 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
5903 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
5904 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
5905 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);
5907 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
5908 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
5909 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
5910 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
5911 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
5912 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
5913 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
5914 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
5915 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
5916 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
5917 GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
5918 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);
5920 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
5921 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
5922 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
5923 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
5924 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
5925 #endif
5927 /*** SPE floating-point extension ***/
5928 #define GEN_SPEFPUOP_CONV(name) \
5929 static always_inline void gen_##name (DisasContext *ctx) \
5930 { \
5931 gen_load_gpr64(cpu_T64[0], rB(ctx->opcode)); \
5932 gen_op_##name(); \
5933 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5934 }
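/* As a reading aid, GEN_SPEFPUOP_CONV(efscfsi) above expands to roughly:
 *
 *     static always_inline void gen_efscfsi (DisasContext *ctx)
 *     {
 *         gen_load_gpr64(cpu_T64[0], rB(ctx->opcode));
 *         gen_op_efscfsi();
 *         gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
 *     }
 *
 * i.e. every SPE floating-point conversion loads rB into the 64-bit
 * temporary, runs its dedicated op and stores the result back to rD. */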
5936 /* Single precision floating-point vector operations */
5937 /* Arithmetic */
5938 GEN_SPEOP_ARITH2(evfsadd);
5939 GEN_SPEOP_ARITH2(evfssub);
5940 GEN_SPEOP_ARITH2(evfsmul);
5941 GEN_SPEOP_ARITH2(evfsdiv);
5942 GEN_SPEOP_ARITH1(evfsabs);
5943 GEN_SPEOP_ARITH1(evfsnabs);
5944 GEN_SPEOP_ARITH1(evfsneg);
5945 /* Conversion */
5946 GEN_SPEFPUOP_CONV(evfscfui);
5947 GEN_SPEFPUOP_CONV(evfscfsi);
5948 GEN_SPEFPUOP_CONV(evfscfuf);
5949 GEN_SPEFPUOP_CONV(evfscfsf);
5950 GEN_SPEFPUOP_CONV(evfsctui);
5951 GEN_SPEFPUOP_CONV(evfsctsi);
5952 GEN_SPEFPUOP_CONV(evfsctuf);
5953 GEN_SPEFPUOP_CONV(evfsctsf);
5954 GEN_SPEFPUOP_CONV(evfsctuiz);
5955 GEN_SPEFPUOP_CONV(evfsctsiz);
5956 /* Comparison */
5957 GEN_SPEOP_COMP(evfscmpgt);
5958 GEN_SPEOP_COMP(evfscmplt);
5959 GEN_SPEOP_COMP(evfscmpeq);
5960 GEN_SPEOP_COMP(evfststgt);
5961 GEN_SPEOP_COMP(evfststlt);
5962 GEN_SPEOP_COMP(evfststeq);
5964 /* Opcodes definitions */
5965 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPEFPU); //
5966 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPEFPU); //
5967 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPEFPU); //
5968 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPEFPU); //
5969 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPEFPU); //
5970 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPEFPU); //
5971 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPEFPU); //
5972 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPEFPU); //
5973 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPEFPU); //
5974 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPEFPU); //
5975 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPEFPU); //
5976 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPEFPU); //
5977 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPEFPU); //
5978 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPEFPU); //
5980 /* Single precision floating-point operations */
5981 /* Arithmetic */
5982 GEN_SPEOP_ARITH2(efsadd);
5983 GEN_SPEOP_ARITH2(efssub);
5984 GEN_SPEOP_ARITH2(efsmul);
5985 GEN_SPEOP_ARITH2(efsdiv);
5986 GEN_SPEOP_ARITH1(efsabs);
5987 GEN_SPEOP_ARITH1(efsnabs);
5988 GEN_SPEOP_ARITH1(efsneg);
5989 /* Conversion */
5990 GEN_SPEFPUOP_CONV(efscfui);
5991 GEN_SPEFPUOP_CONV(efscfsi);
5992 GEN_SPEFPUOP_CONV(efscfuf);
5993 GEN_SPEFPUOP_CONV(efscfsf);
5994 GEN_SPEFPUOP_CONV(efsctui);
5995 GEN_SPEFPUOP_CONV(efsctsi);
5996 GEN_SPEFPUOP_CONV(efsctuf);
5997 GEN_SPEFPUOP_CONV(efsctsf);
5998 GEN_SPEFPUOP_CONV(efsctuiz);
5999 GEN_SPEFPUOP_CONV(efsctsiz);
6000 GEN_SPEFPUOP_CONV(efscfd);
6001 /* Comparison */
6002 GEN_SPEOP_COMP(efscmpgt);
6003 GEN_SPEOP_COMP(efscmplt);
6004 GEN_SPEOP_COMP(efscmpeq);
6005 GEN_SPEOP_COMP(efststgt);
6006 GEN_SPEOP_COMP(efststlt);
6007 GEN_SPEOP_COMP(efststeq);
6009 /* Opcodes definitions */
6010 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPEFPU); //
6011 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPEFPU); //
6012 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPEFPU); //
6013 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPEFPU); //
6014 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPEFPU); //
6015 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPEFPU); //
6016 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPEFPU); //
6017 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPEFPU); //
6018 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPEFPU); //
6019 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPEFPU); //
6020 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPEFPU); //
6021 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPEFPU); //
6022 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPEFPU); //
6023 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPEFPU); //
6025 /* Double precision floating-point operations */
6026 /* Arithmetic */
6027 GEN_SPEOP_ARITH2(efdadd);
6028 GEN_SPEOP_ARITH2(efdsub);
6029 GEN_SPEOP_ARITH2(efdmul);
6030 GEN_SPEOP_ARITH2(efddiv);
6031 GEN_SPEOP_ARITH1(efdabs);
6032 GEN_SPEOP_ARITH1(efdnabs);
6033 GEN_SPEOP_ARITH1(efdneg);
6034 /* Conversion */
6036 GEN_SPEFPUOP_CONV(efdcfui);
6037 GEN_SPEFPUOP_CONV(efdcfsi);
6038 GEN_SPEFPUOP_CONV(efdcfuf);
6039 GEN_SPEFPUOP_CONV(efdcfsf);
6040 GEN_SPEFPUOP_CONV(efdctui);
6041 GEN_SPEFPUOP_CONV(efdctsi);
6042 GEN_SPEFPUOP_CONV(efdctuf);
6043 GEN_SPEFPUOP_CONV(efdctsf);
6044 GEN_SPEFPUOP_CONV(efdctuiz);
6045 GEN_SPEFPUOP_CONV(efdctsiz);
6046 GEN_SPEFPUOP_CONV(efdcfs);
6047 GEN_SPEFPUOP_CONV(efdcfuid);
6048 GEN_SPEFPUOP_CONV(efdcfsid);
6049 GEN_SPEFPUOP_CONV(efdctuidz);
6050 GEN_SPEFPUOP_CONV(efdctsidz);
6051 /* Comparison */
6052 GEN_SPEOP_COMP(efdcmpgt);
6053 GEN_SPEOP_COMP(efdcmplt);
6054 GEN_SPEOP_COMP(efdcmpeq);
6055 GEN_SPEOP_COMP(efdtstgt);
6056 GEN_SPEOP_COMP(efdtstlt);
6057 GEN_SPEOP_COMP(efdtsteq);
6059 /* Opcodes definitions */
6060 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPEFPU); //
6061 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPEFPU); //
6062 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPEFPU); //
6063 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPEFPU); //
6064 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPEFPU); //
6065 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPEFPU); //
6066 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPEFPU); //
6067 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPEFPU); //
6068 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPEFPU); //
6069 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPEFPU); //
6070 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPEFPU); //
6071 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPEFPU); //
6072 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPEFPU); //
6073 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPEFPU); //
6074 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPEFPU); //
6075 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPEFPU); //
6077 /* End opcode list */
6078 GEN_OPCODE_MARK(end);
6080 #include "translate_init.c"
6081 #include "helper_regs.h"
6083 /*****************************************************************************/
6084 /* Misc PowerPC helpers */
6085 void cpu_dump_state (CPUState *env, FILE *f,
6086 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
6087 int flags)
6088 {
6089 #define RGPL 4
6090 #define RFPL 4
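/* RGPL/RFPL: number of general-purpose/floating-point registers dumped
 * per output line. */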
6092 int i;
6094 cpu_fprintf(f, "NIP " ADDRX " LR " ADDRX " CTR " ADDRX " XER %08x\n",
6095 env->nip, env->lr, env->ctr, hreg_load_xer(env));
6096 cpu_fprintf(f, "MSR " ADDRX " HID0 " ADDRX " HF " ADDRX " idx %d\n",
6097 env->msr, env->spr[SPR_HID0], env->hflags, env->mmu_idx);
6098 #if !defined(NO_TIMER_DUMP)
6099 cpu_fprintf(f, "TB %08x %08x "
6100 #if !defined(CONFIG_USER_ONLY)
6101 "DECR %08x"
6102 #endif
6103 "\n",
6104 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
6105 #if !defined(CONFIG_USER_ONLY)
6106 , cpu_ppc_load_decr(env)
6107 #endif
6108 );
6109 #endif
6110 for (i = 0; i < 32; i++) {
6111 if ((i & (RGPL - 1)) == 0)
6112 cpu_fprintf(f, "GPR%02d", i);
6113 cpu_fprintf(f, " " REGX, ppc_dump_gpr(env, i));
6114 if ((i & (RGPL - 1)) == (RGPL - 1))
6115 cpu_fprintf(f, "\n");
6116 }
6117 cpu_fprintf(f, "CR ");
6118 for (i = 0; i < 8; i++)
6119 cpu_fprintf(f, "%01x", env->crf[i]);
6120 cpu_fprintf(f, " [");
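/* Decode each 4-bit CR field: 'L', 'G' and 'E' stand for the LT, GT and
 * EQ bits, and the trailing 'O' marks the SO (summary overflow) bit. */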
6121 for (i = 0; i < 8; i++) {
6122 char a = '-';
6123 if (env->crf[i] & 0x08)
6124 a = 'L';
6125 else if (env->crf[i] & 0x04)
6126 a = 'G';
6127 else if (env->crf[i] & 0x02)
6128 a = 'E';
6129 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
6130 }
6131 cpu_fprintf(f, " ] RES " ADDRX "\n", env->reserve);
6132 for (i = 0; i < 32; i++) {
6133 if ((i & (RFPL - 1)) == 0)
6134 cpu_fprintf(f, "FPR%02d", i);
6135 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
6136 if ((i & (RFPL - 1)) == (RFPL - 1))
6137 cpu_fprintf(f, "\n");
6138 }
6139 #if !defined(CONFIG_USER_ONLY)
6140 cpu_fprintf(f, "SRR0 " ADDRX " SRR1 " ADDRX " SDR1 " ADDRX "\n",
6141 env->spr[SPR_SRR0], env->spr[SPR_SRR1], env->sdr1);
6142 #endif
6144 #undef RGPL
6145 #undef RFPL
6146 }
6148 void cpu_dump_statistics (CPUState *env, FILE*f,
6149 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
6150 int flags)
6151 {
6152 #if defined(DO_PPC_STATISTICS)
6153 opc_handler_t **t1, **t2, **t3, *handler;
6154 int op1, op2, op3;
6156 t1 = env->opcodes;
6157 for (op1 = 0; op1 < 64; op1++) {
6158 handler = t1[op1];
6159 if (is_indirect_opcode(handler)) {
6160 t2 = ind_table(handler);
6161 for (op2 = 0; op2 < 32; op2++) {
6162 handler = t2[op2];
6163 if (is_indirect_opcode(handler)) {
6164 t3 = ind_table(handler);
6165 for (op3 = 0; op3 < 32; op3++) {
6166 handler = t3[op3];
6167 if (handler->count == 0)
6168 continue;
6169 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
6170 "%016llx %lld\n",
6171 op1, op2, op3, op1, (op3 << 5) | op2,
6172 handler->oname,
6173 handler->count, handler->count);
6174 }
6175 } else {
6176 if (handler->count == 0)
6177 continue;
6178 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
6179 "%016llx %lld\n",
6180 op1, op2, op1, op2, handler->oname,
6181 handler->count, handler->count);
6182 }
6183 }
6184 } else {
6185 if (handler->count == 0)
6186 continue;
6187 cpu_fprintf(f, "%02x (%02x ) %16s: %016llx %lld\n",
6188 op1, op1, handler->oname,
6189 handler->count, handler->count);
6190 }
6191 }
6192 #endif
6193 }
6195 /*****************************************************************************/
6196 static always_inline void gen_intermediate_code_internal (CPUState *env,
6197 TranslationBlock *tb,
6198 int search_pc)
6199 {
6200 DisasContext ctx, *ctxp = &ctx;
6201 opc_handler_t **table, *handler;
6202 target_ulong pc_start;
6203 uint16_t *gen_opc_end;
6204 int supervisor, little_endian;
6205 int j, lj = -1;
6206 int num_insns;
6207 int max_insns;
6209 pc_start = tb->pc;
6210 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6211 #if defined(OPTIMIZE_FPRF_UPDATE)
6212 gen_fprf_ptr = gen_fprf_buf;
6213 #endif
6214 ctx.nip = pc_start;
6215 ctx.tb = tb;
6216 ctx.exception = POWERPC_EXCP_NONE;
6217 ctx.spr_cb = env->spr_cb;
6218 supervisor = env->mmu_idx;
6219 #if !defined(CONFIG_USER_ONLY)
6220 ctx.supervisor = supervisor;
6221 #endif
6222 little_endian = env->hflags & (1 << MSR_LE) ? 1 : 0;
6223 #if defined(TARGET_PPC64)
6224 ctx.sf_mode = msr_sf;
6225 ctx.mem_idx = (supervisor << 2) | (msr_sf << 1) | little_endian;
6226 #else
6227 ctx.mem_idx = (supervisor << 1) | little_endian;
6228 #endif
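/* The mem_idx value computed above selects the flavour of memory access
 * ops used for this TB: the low bit flags little-endian mode, the upper
 * bits carry the privilege level from env->mmu_idx (plus msr_sf on
 * 64-bit targets). */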
6229 ctx.dcache_line_size = env->dcache_line_size;
6230 ctx.fpu_enabled = msr_fp;
6231 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
6232 ctx.spe_enabled = msr_spe;
6233 else
6234 ctx.spe_enabled = 0;
6235 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
6236 ctx.altivec_enabled = msr_vr;
6237 else
6238 ctx.altivec_enabled = 0;
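/* Single stepping can be requested from three sources: MSR[SE] (trace
 * after every instruction), MSR[BE] (trace after branches) and the gdb
 * stub; the corresponding flags are accumulated below. */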
6239 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
6240 ctx.singlestep_enabled = CPU_SINGLE_STEP;
6241 else
6242 ctx.singlestep_enabled = 0;
6243 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
6244 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
6245 if (unlikely(env->singlestep_enabled))
6246 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
6247 #if defined (DO_SINGLE_STEP) && 0
6248 /* Single step trace mode */
6249 msr_se = 1;
6250 #endif
6251 num_insns = 0;
6252 max_insns = tb->cflags & CF_COUNT_MASK;
6253 if (max_insns == 0)
6254 max_insns = CF_COUNT_MASK;
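/* max_insns bounds how many guest instructions are translated into this
 * TB; a zero budget in tb->cflags means no explicit limit. */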
6256 gen_icount_start();
6257 /* Set env in case of segfault during code fetch */
6258 while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
6259 if (unlikely(env->nb_breakpoints > 0)) {
6260 for (j = 0; j < env->nb_breakpoints; j++) {
6261 if (env->breakpoints[j] == ctx.nip) {
6262 gen_update_nip(&ctx, ctx.nip);
6263 gen_op_debug();
6264 break;
6265 }
6266 }
6267 }
6268 if (unlikely(search_pc)) {
6269 j = gen_opc_ptr - gen_opc_buf;
6270 if (lj < j) {
6271 lj++;
6272 while (lj < j)
6273 gen_opc_instr_start[lj++] = 0;
6274 gen_opc_pc[lj] = ctx.nip;
6275 gen_opc_instr_start[lj] = 1;
6276 gen_opc_icount[lj] = num_insns;
6277 }
6278 }
6279 #if defined PPC_DEBUG_DISAS
6280 if (loglevel & CPU_LOG_TB_IN_ASM) {
6281 fprintf(logfile, "----------------\n");
6282 fprintf(logfile, "nip=" ADDRX " super=%d ir=%d\n",
6283 ctx.nip, supervisor, (int)msr_ir);
6284 }
6285 #endif
6286 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
6287 gen_io_start();
6288 if (unlikely(little_endian)) {
6289 ctx.opcode = bswap32(ldl_code(ctx.nip));
6290 } else {
6291 ctx.opcode = ldl_code(ctx.nip);
6292 }
6293 #if defined PPC_DEBUG_DISAS
6294 if (loglevel & CPU_LOG_TB_IN_ASM) {
6295 fprintf(logfile, "translate opcode %08x (%02x %02x %02x) (%s)\n",
6296 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
6297 opc3(ctx.opcode), little_endian ? "little" : "big");
6298 }
6299 #endif
6300 ctx.nip += 4;
6301 table = env->opcodes;
6302 num_insns++;
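/* Dispatch: the 6-bit major opcode (opc1) indexes a 64-entry table and
 * indirect entries chain to 32-entry sub-tables indexed by opc2 and then
 * opc3, the same walk performed by cpu_dump_statistics() above. */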
6303 handler = table[opc1(ctx.opcode)];
6304 if (is_indirect_opcode(handler)) {
6305 table = ind_table(handler);
6306 handler = table[opc2(ctx.opcode)];
6307 if (is_indirect_opcode(handler)) {
6308 table = ind_table(handler);
6309 handler = table[opc3(ctx.opcode)];
6310 }
6311 }
6312 /* Is opcode *REALLY* valid ? */
6313 if (unlikely(handler->handler == &gen_invalid)) {
6314 if (loglevel != 0) {
6315 fprintf(logfile, "invalid/unsupported opcode: "
6316 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
6317 opc1(ctx.opcode), opc2(ctx.opcode),
6318 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
6319 } else {
6320 printf("invalid/unsupported opcode: "
6321 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
6322 opc1(ctx.opcode), opc2(ctx.opcode),
6323 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
6324 }
6325 } else {
6326 if (unlikely((ctx.opcode & handler->inval) != 0)) {
6327 if (loglevel != 0) {
6328 fprintf(logfile, "invalid bits: %08x for opcode: "
6329 "%02x - %02x - %02x (%08x) " ADDRX "\n",
6330 ctx.opcode & handler->inval, opc1(ctx.opcode),
6331 opc2(ctx.opcode), opc3(ctx.opcode),
6332 ctx.opcode, ctx.nip - 4);
6333 } else {
6334 printf("invalid bits: %08x for opcode: "
6335 "%02x - %02x - %02x (%08x) " ADDRX "\n",
6336 ctx.opcode & handler->inval, opc1(ctx.opcode),
6337 opc2(ctx.opcode), opc3(ctx.opcode),
6338 ctx.opcode, ctx.nip - 4);
6339 }
6340 GEN_EXCP_INVAL(ctxp);
6341 break;
6342 }
6343 }
6344 (*(handler->handler))(&ctx);
6345 #if defined(DO_PPC_STATISTICS)
6346 handler->count++;
6347 #endif
6348 /* Check trace mode exceptions */
6349 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
6350 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
6351 ctx.exception != POWERPC_SYSCALL &&
6352 ctx.exception != POWERPC_EXCP_TRAP &&
6353 ctx.exception != POWERPC_EXCP_BRANCH)) {
6354 GEN_EXCP(ctxp, POWERPC_EXCP_TRACE, 0);
6355 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
6356 (env->singlestep_enabled) ||
6357 num_insns >= max_insns)) {
6358 /* if we reach a page boundary or are single stepping, stop
6359 * generation
6360 */
6361 break;
6362 }
6363 #if defined (DO_SINGLE_STEP)
6364 break;
6365 #endif
6366 }
6367 if (tb->cflags & CF_LAST_IO)
6368 gen_io_end();
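/* Close the TB: with no pending exception, chain directly to the code at
 * ctx.nip; otherwise (branches excepted, they emit their own exit) raise
 * a debug exception if single stepping for gdb and return to the CPU loop. */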
6369 if (ctx.exception == POWERPC_EXCP_NONE) {
6370 gen_goto_tb(&ctx, 0, ctx.nip);
6371 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
6372 if (unlikely(env->singlestep_enabled)) {
6373 gen_update_nip(&ctx, ctx.nip);
6374 gen_op_debug();
6375 }
6376 /* Generate the return instruction */
6377 tcg_gen_exit_tb(0);
6378 }
6379 gen_icount_end(tb, num_insns);
6380 *gen_opc_ptr = INDEX_op_end;
6381 if (unlikely(search_pc)) {
6382 j = gen_opc_ptr - gen_opc_buf;
6383 lj++;
6384 while (lj <= j)
6385 gen_opc_instr_start[lj++] = 0;
6386 } else {
6387 tb->size = ctx.nip - pc_start;
6388 tb->icount = num_insns;
6389 }
6390 #if defined(DEBUG_DISAS)
6391 if (loglevel & CPU_LOG_TB_CPU) {
6392 fprintf(logfile, "---------------- excp: %04x\n", ctx.exception);
6393 cpu_dump_state(env, logfile, fprintf, 0);
6394 }
6395 if (loglevel & CPU_LOG_TB_IN_ASM) {
6396 int flags;
6397 flags = env->bfd_mach;
6398 flags |= little_endian << 16;
6399 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6400 target_disas(logfile, pc_start, ctx.nip - pc_start, flags);
6401 fprintf(logfile, "\n");
6402 }
6403 #endif
6404 }
6406 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
6407 {
6408 gen_intermediate_code_internal(env, tb, 0);
6409 }
6411 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
6412 {
6413 gen_intermediate_code_internal(env, tb, 1);
6414 }
6416 void gen_pc_load(CPUState *env, TranslationBlock *tb,
6417 unsigned long searched_pc, int pc_pos, void *puc)
6418 {
6419 int type, c;
6420 /* for PPC, we need to look at the micro operation to get the
6421 * access type */
6422 env->nip = gen_opc_pc[pc_pos];
6423 c = gen_opc_buf[pc_pos];
6424 switch(c) {
6425 #if defined(CONFIG_USER_ONLY)
6426 #define CASE3(op)\
6427 case INDEX_op_ ## op ## _raw
6428 #else
6429 #define CASE3(op)\
6430 case INDEX_op_ ## op ## _user:\
6431 case INDEX_op_ ## op ## _kernel:\
6432 case INDEX_op_ ## op ## _hypv
6433 #endif
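/* CASE3(op) expands to the case labels for every MMU-mode variant of a
 * micro-op, e.g. CASE3(lfd) covers INDEX_op_lfd_user, _kernel and _hypv
 * (just INDEX_op_lfd_raw in user-only builds), so a single entry per
 * architectural instruction suffices below. */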
6435 CASE3(stfd):
6436 CASE3(stfs):
6437 CASE3(lfd):
6438 CASE3(lfs):
6439 type = ACCESS_FLOAT;
6440 break;
6441 CASE3(lwarx):
6442 type = ACCESS_RES;
6443 break;
6444 CASE3(stwcx):
6445 type = ACCESS_RES;
6446 break;
6447 CASE3(eciwx):
6448 CASE3(ecowx):
6449 type = ACCESS_EXT;
6450 break;
6451 default:
6452 type = ACCESS_INT;
6453 break;
6454 }
6455 env->access_type = type;