/*
 * PowerPC emulation micro-operations for qemu.
 *
 * Copyright (c) 2003-2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
 */
21 #include "op_mem_access.h"
23 /*** Integer load and store multiple ***/
24 void OPPROTO
glue(op_lmw
, MEMSUFFIX
) (void)
26 glue(do_lmw
, MEMSUFFIX
)(PARAM1
);
30 #if defined(TARGET_PPC64)
31 void OPPROTO
glue(op_lmw_64
, MEMSUFFIX
) (void)
33 glue(do_lmw_64
, MEMSUFFIX
)(PARAM1
);
38 void OPPROTO
glue(op_lmw_le
, MEMSUFFIX
) (void)
40 glue(do_lmw_le
, MEMSUFFIX
)(PARAM1
);
44 #if defined(TARGET_PPC64)
45 void OPPROTO
glue(op_lmw_le_64
, MEMSUFFIX
) (void)
47 glue(do_lmw_le_64
, MEMSUFFIX
)(PARAM1
);
52 void OPPROTO
glue(op_stmw
, MEMSUFFIX
) (void)
54 glue(do_stmw
, MEMSUFFIX
)(PARAM1
);
58 #if defined(TARGET_PPC64)
59 void OPPROTO
glue(op_stmw_64
, MEMSUFFIX
) (void)
61 glue(do_stmw_64
, MEMSUFFIX
)(PARAM1
);
66 void OPPROTO
glue(op_stmw_le
, MEMSUFFIX
) (void)
68 glue(do_stmw_le
, MEMSUFFIX
)(PARAM1
);
72 #if defined(TARGET_PPC64)
73 void OPPROTO
glue(op_stmw_le_64
, MEMSUFFIX
) (void)
75 glue(do_stmw_le_64
, MEMSUFFIX
)(PARAM1
);
80 /*** Integer load and store strings ***/
81 void OPPROTO
glue(op_lswi
, MEMSUFFIX
) (void)
83 glue(do_lsw
, MEMSUFFIX
)(PARAM1
);
87 #if defined(TARGET_PPC64)
88 void OPPROTO
glue(op_lswi_64
, MEMSUFFIX
) (void)
90 glue(do_lsw_64
, MEMSUFFIX
)(PARAM1
);
95 /* PPC32 specification says we must generate an exception if
96 * rA is in the range of registers to be loaded.
97 * In an other hand, IBM says this is valid, but rA won't be loaded.
98 * For now, I'll follow the spec...
100 void OPPROTO
glue(op_lswx
, MEMSUFFIX
) (void)
102 /* Note: T1 comes from xer_bc then no cast is needed */
103 if (likely(T1
!= 0)) {
104 if (unlikely((PARAM1
< PARAM2
&& (PARAM1
+ T1
) > PARAM2
) ||
105 (PARAM1
< PARAM3
&& (PARAM1
+ T1
) > PARAM3
))) {
106 do_raise_exception_err(POWERPC_EXCP_PROGRAM
,
108 POWERPC_EXCP_INVAL_LSWX
);
110 glue(do_lsw
, MEMSUFFIX
)(PARAM1
);
116 #if defined(TARGET_PPC64)
117 void OPPROTO
glue(op_lswx_64
, MEMSUFFIX
) (void)
119 /* Note: T1 comes from xer_bc then no cast is needed */
120 if (likely(T1
!= 0)) {
121 if (unlikely((PARAM1
< PARAM2
&& (PARAM1
+ T1
) > PARAM2
) ||
122 (PARAM1
< PARAM3
&& (PARAM1
+ T1
) > PARAM3
))) {
123 do_raise_exception_err(POWERPC_EXCP_PROGRAM
,
125 POWERPC_EXCP_INVAL_LSWX
);
127 glue(do_lsw_64
, MEMSUFFIX
)(PARAM1
);
134 void OPPROTO
glue(op_stsw
, MEMSUFFIX
) (void)
136 glue(do_stsw
, MEMSUFFIX
)(PARAM1
);
140 #if defined(TARGET_PPC64)
141 void OPPROTO
glue(op_stsw_64
, MEMSUFFIX
) (void)
143 glue(do_stsw_64
, MEMSUFFIX
)(PARAM1
);
/*** Floating-point store ***/
/* Emit a store micro-op that writes FT0 through the given store helper,
 * using T0 as the effective address (truncated to 32 bits). */
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RET;                                                                      \
}

#if defined(TARGET_PPC64)
/* 64-bit address-space variant: T0 is used as a full 64-bit address. */
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RET;                                                                      \
}
#endif
165 static always_inline
void glue(stfs
, MEMSUFFIX
) (target_ulong EA
, float64 d
)
167 glue(stfl
, MEMSUFFIX
)(EA
, float64_to_float32(d
, &env
->fp_status
));
170 static always_inline
void glue(stfiw
, MEMSUFFIX
) (target_ulong EA
, float64 d
)
174 /* Store the low order 32 bits without any conversion */
176 glue(st32
, MEMSUFFIX
)(EA
, u
.l
.lower
);
179 PPC_STF_OP(fd
, stfq
);
180 PPC_STF_OP(fs
, stfs
);
181 PPC_STF_OP(fiw
, stfiw
);
182 #if defined(TARGET_PPC64)
183 PPC_STF_OP_64(fd
, stfq
);
184 PPC_STF_OP_64(fs
, stfs
);
185 PPC_STF_OP_64(fiw
, stfiw
);
188 static always_inline
void glue(stfqr
, MEMSUFFIX
) (target_ulong EA
, float64 d
)
193 u
.ll
= bswap64(u
.ll
);
194 glue(stfq
, MEMSUFFIX
)(EA
, u
.d
);
197 static always_inline
void glue(stfsr
, MEMSUFFIX
) (target_ulong EA
, float64 d
)
201 u
.f
= float64_to_float32(d
, &env
->fp_status
);
203 glue(stfl
, MEMSUFFIX
)(EA
, u
.f
);
206 static always_inline
void glue(stfiwr
, MEMSUFFIX
) (target_ulong EA
, float64 d
)
210 /* Store the low order 32 bits without any conversion */
212 u
.l
.lower
= bswap32(u
.l
.lower
);
213 glue(st32
, MEMSUFFIX
)(EA
, u
.l
.lower
);
216 PPC_STF_OP(fd_le
, stfqr
);
217 PPC_STF_OP(fs_le
, stfsr
);
218 PPC_STF_OP(fiw_le
, stfiwr
);
219 #if defined(TARGET_PPC64)
220 PPC_STF_OP_64(fd_le
, stfqr
);
221 PPC_STF_OP_64(fs_le
, stfsr
);
222 PPC_STF_OP_64(fiw_le
, stfiwr
);
/*** Floating-point load ***/
/* Emit a load micro-op that fills FT0 via the given load helper,
 * using T0 as the effective address (truncated to 32 bits). */
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RET;                                                                      \
}

#if defined(TARGET_PPC64)
/* 64-bit address-space variant: T0 is used as a full 64-bit address. */
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RET;                                                                      \
}
#endif
242 static always_inline float64
glue(ldfs
, MEMSUFFIX
) (target_ulong EA
)
244 return float32_to_float64(glue(ldfl
, MEMSUFFIX
)(EA
), &env
->fp_status
);
247 PPC_LDF_OP(fd
, ldfq
);
248 PPC_LDF_OP(fs
, ldfs
);
249 #if defined(TARGET_PPC64)
250 PPC_LDF_OP_64(fd
, ldfq
);
251 PPC_LDF_OP_64(fs
, ldfs
);
254 static always_inline float64
glue(ldfqr
, MEMSUFFIX
) (target_ulong EA
)
258 u
.d
= glue(ldfq
, MEMSUFFIX
)(EA
);
259 u
.ll
= bswap64(u
.ll
);
264 static always_inline float64
glue(ldfsr
, MEMSUFFIX
) (target_ulong EA
)
268 u
.f
= glue(ldfl
, MEMSUFFIX
)(EA
);
271 return float32_to_float64(u
.f
, &env
->fp_status
);
274 PPC_LDF_OP(fd_le
, ldfqr
);
275 PPC_LDF_OP(fs_le
, ldfsr
);
276 #if defined(TARGET_PPC64)
277 PPC_LDF_OP_64(fd_le
, ldfqr
);
278 PPC_LDF_OP_64(fs_le
, ldfsr
);
281 /* Load and set reservation */
282 void OPPROTO
glue(op_lwarx
, MEMSUFFIX
) (void)
284 if (unlikely(T0
& 0x03)) {
285 do_raise_exception(POWERPC_EXCP_ALIGN
);
287 T1
= glue(ldu32
, MEMSUFFIX
)((uint32_t)T0
);
288 env
->reserve
= (uint32_t)T0
;
293 #if defined(TARGET_PPC64)
294 void OPPROTO
glue(op_lwarx_64
, MEMSUFFIX
) (void)
296 if (unlikely(T0
& 0x03)) {
297 do_raise_exception(POWERPC_EXCP_ALIGN
);
299 T1
= glue(ldu32
, MEMSUFFIX
)((uint64_t)T0
);
300 env
->reserve
= (uint64_t)T0
;
305 void OPPROTO
glue(op_ldarx
, MEMSUFFIX
) (void)
307 if (unlikely(T0
& 0x03)) {
308 do_raise_exception(POWERPC_EXCP_ALIGN
);
310 T1
= glue(ldu64
, MEMSUFFIX
)((uint32_t)T0
);
311 env
->reserve
= (uint32_t)T0
;
316 void OPPROTO
glue(op_ldarx_64
, MEMSUFFIX
) (void)
318 if (unlikely(T0
& 0x03)) {
319 do_raise_exception(POWERPC_EXCP_ALIGN
);
321 T1
= glue(ldu64
, MEMSUFFIX
)((uint64_t)T0
);
322 env
->reserve
= (uint64_t)T0
;
328 void OPPROTO
glue(op_lwarx_le
, MEMSUFFIX
) (void)
330 if (unlikely(T0
& 0x03)) {
331 do_raise_exception(POWERPC_EXCP_ALIGN
);
333 T1
= glue(ldu32r
, MEMSUFFIX
)((uint32_t)T0
);
334 env
->reserve
= (uint32_t)T0
;
339 #if defined(TARGET_PPC64)
340 void OPPROTO
glue(op_lwarx_le_64
, MEMSUFFIX
) (void)
342 if (unlikely(T0
& 0x03)) {
343 do_raise_exception(POWERPC_EXCP_ALIGN
);
345 T1
= glue(ldu32r
, MEMSUFFIX
)((uint64_t)T0
);
346 env
->reserve
= (uint64_t)T0
;
351 void OPPROTO
glue(op_ldarx_le
, MEMSUFFIX
) (void)
353 if (unlikely(T0
& 0x03)) {
354 do_raise_exception(POWERPC_EXCP_ALIGN
);
356 T1
= glue(ldu64r
, MEMSUFFIX
)((uint32_t)T0
);
357 env
->reserve
= (uint32_t)T0
;
362 void OPPROTO
glue(op_ldarx_le_64
, MEMSUFFIX
) (void)
364 if (unlikely(T0
& 0x03)) {
365 do_raise_exception(POWERPC_EXCP_ALIGN
);
367 T1
= glue(ldu64r
, MEMSUFFIX
)((uint64_t)T0
);
368 env
->reserve
= (uint64_t)T0
;
374 /* Store with reservation */
375 void OPPROTO
glue(op_stwcx
, MEMSUFFIX
) (void)
377 if (unlikely(T0
& 0x03)) {
378 do_raise_exception(POWERPC_EXCP_ALIGN
);
380 if (unlikely(env
->reserve
!= (uint32_t)T0
)) {
381 env
->crf
[0] = xer_so
;
383 glue(st32
, MEMSUFFIX
)((uint32_t)T0
, T1
);
384 env
->crf
[0] = xer_so
| 0x02;
387 env
->reserve
= (target_ulong
)-1ULL;
391 #if defined(TARGET_PPC64)
392 void OPPROTO
glue(op_stwcx_64
, MEMSUFFIX
) (void)
394 if (unlikely(T0
& 0x03)) {
395 do_raise_exception(POWERPC_EXCP_ALIGN
);
397 if (unlikely(env
->reserve
!= (uint64_t)T0
)) {
398 env
->crf
[0] = xer_so
;
400 glue(st32
, MEMSUFFIX
)((uint64_t)T0
, T1
);
401 env
->crf
[0] = xer_so
| 0x02;
404 env
->reserve
= (target_ulong
)-1ULL;
408 void OPPROTO
glue(op_stdcx
, MEMSUFFIX
) (void)
410 if (unlikely(T0
& 0x03)) {
411 do_raise_exception(POWERPC_EXCP_ALIGN
);
413 if (unlikely(env
->reserve
!= (uint32_t)T0
)) {
414 env
->crf
[0] = xer_so
;
416 glue(st64
, MEMSUFFIX
)((uint32_t)T0
, T1
);
417 env
->crf
[0] = xer_so
| 0x02;
420 env
->reserve
= (target_ulong
)-1ULL;
424 void OPPROTO
glue(op_stdcx_64
, MEMSUFFIX
) (void)
426 if (unlikely(T0
& 0x03)) {
427 do_raise_exception(POWERPC_EXCP_ALIGN
);
429 if (unlikely(env
->reserve
!= (uint64_t)T0
)) {
430 env
->crf
[0] = xer_so
;
432 glue(st64
, MEMSUFFIX
)((uint64_t)T0
, T1
);
433 env
->crf
[0] = xer_so
| 0x02;
436 env
->reserve
= (target_ulong
)-1ULL;
441 void OPPROTO
glue(op_stwcx_le
, MEMSUFFIX
) (void)
443 if (unlikely(T0
& 0x03)) {
444 do_raise_exception(POWERPC_EXCP_ALIGN
);
446 if (unlikely(env
->reserve
!= (uint32_t)T0
)) {
447 env
->crf
[0] = xer_so
;
449 glue(st32r
, MEMSUFFIX
)((uint32_t)T0
, T1
);
450 env
->crf
[0] = xer_so
| 0x02;
453 env
->reserve
= (target_ulong
)-1ULL;
457 #if defined(TARGET_PPC64)
458 void OPPROTO
glue(op_stwcx_le_64
, MEMSUFFIX
) (void)
460 if (unlikely(T0
& 0x03)) {
461 do_raise_exception(POWERPC_EXCP_ALIGN
);
463 if (unlikely(env
->reserve
!= (uint64_t)T0
)) {
464 env
->crf
[0] = xer_so
;
466 glue(st32r
, MEMSUFFIX
)((uint64_t)T0
, T1
);
467 env
->crf
[0] = xer_so
| 0x02;
470 env
->reserve
= (target_ulong
)-1ULL;
474 void OPPROTO
glue(op_stdcx_le
, MEMSUFFIX
) (void)
476 if (unlikely(T0
& 0x03)) {
477 do_raise_exception(POWERPC_EXCP_ALIGN
);
479 if (unlikely(env
->reserve
!= (uint32_t)T0
)) {
480 env
->crf
[0] = xer_so
;
482 glue(st64r
, MEMSUFFIX
)((uint32_t)T0
, T1
);
483 env
->crf
[0] = xer_so
| 0x02;
486 env
->reserve
= (target_ulong
)-1ULL;
490 void OPPROTO
glue(op_stdcx_le_64
, MEMSUFFIX
) (void)
492 if (unlikely(T0
& 0x03)) {
493 do_raise_exception(POWERPC_EXCP_ALIGN
);
495 if (unlikely(env
->reserve
!= (uint64_t)T0
)) {
496 env
->crf
[0] = xer_so
;
498 glue(st64r
, MEMSUFFIX
)((uint64_t)T0
, T1
);
499 env
->crf
[0] = xer_so
| 0x02;
502 env
->reserve
= (target_ulong
)-1ULL;
507 void OPPROTO
glue(op_dcbz_l32
, MEMSUFFIX
) (void)
509 T0
&= ~((uint32_t)31);
510 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x00), 0);
511 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x04), 0);
512 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x08), 0);
513 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x0C), 0);
514 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x10), 0);
515 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x14), 0);
516 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x18), 0);
517 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x1C), 0);
521 void OPPROTO
glue(op_dcbz_l64
, MEMSUFFIX
) (void)
523 T0
&= ~((uint32_t)63);
524 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x00), 0);
525 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x04), 0);
526 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x08), 0);
527 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x0C), 0);
528 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x10), 0);
529 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x14), 0);
530 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x18), 0);
531 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x1C), 0);
532 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x20UL
), 0);
533 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x24UL
), 0);
534 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x28UL
), 0);
535 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x2CUL
), 0);
536 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x30UL
), 0);
537 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x34UL
), 0);
538 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x38UL
), 0);
539 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x3CUL
), 0);
543 void OPPROTO
glue(op_dcbz_l128
, MEMSUFFIX
) (void)
545 T0
&= ~((uint32_t)127);
546 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x00), 0);
547 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x04), 0);
548 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x08), 0);
549 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x0C), 0);
550 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x10), 0);
551 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x14), 0);
552 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x18), 0);
553 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x1C), 0);
554 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x20UL
), 0);
555 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x24UL
), 0);
556 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x28UL
), 0);
557 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x2CUL
), 0);
558 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x30UL
), 0);
559 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x34UL
), 0);
560 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x38UL
), 0);
561 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x3CUL
), 0);
562 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x40UL
), 0);
563 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x44UL
), 0);
564 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x48UL
), 0);
565 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x4CUL
), 0);
566 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x50UL
), 0);
567 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x54UL
), 0);
568 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x58UL
), 0);
569 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x5CUL
), 0);
570 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x60UL
), 0);
571 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x64UL
), 0);
572 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x68UL
), 0);
573 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x6CUL
), 0);
574 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x70UL
), 0);
575 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x74UL
), 0);
576 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x78UL
), 0);
577 glue(st32
, MEMSUFFIX
)((uint32_t)(T0
+ 0x7CUL
), 0);
581 void OPPROTO
glue(op_dcbz
, MEMSUFFIX
) (void)
583 glue(do_dcbz
, MEMSUFFIX
)();
#if defined(TARGET_PPC64)
/* 64-bit address-space variants of dcbz. */
void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)31);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    RET;
}

void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)63);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    RET;
}

void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)127);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x40UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x44UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x48UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x4CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x50UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x54UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x58UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x5CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x60UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x64UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x68UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x6CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x70UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x74UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x78UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x7CUL), 0);
    RET;
}

void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(do_dcbz_64, MEMSUFFIX)();
    RET;
}
#endif
669 /* Instruction cache block invalidate */
670 void OPPROTO
glue(op_icbi
, MEMSUFFIX
) (void)
672 glue(do_icbi
, MEMSUFFIX
)();
676 #if defined(TARGET_PPC64)
677 void OPPROTO
glue(op_icbi_64
, MEMSUFFIX
) (void)
679 glue(do_icbi_64
, MEMSUFFIX
)();
684 /* External access */
685 void OPPROTO
glue(op_eciwx
, MEMSUFFIX
) (void)
687 T1
= glue(ldu32
, MEMSUFFIX
)((uint32_t)T0
);
691 #if defined(TARGET_PPC64)
692 void OPPROTO
glue(op_eciwx_64
, MEMSUFFIX
) (void)
694 T1
= glue(ldu32
, MEMSUFFIX
)((uint64_t)T0
);
699 void OPPROTO
glue(op_ecowx
, MEMSUFFIX
) (void)
701 glue(st32
, MEMSUFFIX
)((uint32_t)T0
, T1
);
705 #if defined(TARGET_PPC64)
706 void OPPROTO
glue(op_ecowx_64
, MEMSUFFIX
) (void)
708 glue(st32
, MEMSUFFIX
)((uint64_t)T0
, T1
);
713 void OPPROTO
glue(op_eciwx_le
, MEMSUFFIX
) (void)
715 T1
= glue(ldu32r
, MEMSUFFIX
)((uint32_t)T0
);
719 #if defined(TARGET_PPC64)
720 void OPPROTO
glue(op_eciwx_le_64
, MEMSUFFIX
) (void)
722 T1
= glue(ldu32r
, MEMSUFFIX
)((uint64_t)T0
);
727 void OPPROTO
glue(op_ecowx_le
, MEMSUFFIX
) (void)
729 glue(st32r
, MEMSUFFIX
)((uint32_t)T0
, T1
);
733 #if defined(TARGET_PPC64)
734 void OPPROTO
glue(op_ecowx_le_64
, MEMSUFFIX
) (void)
736 glue(st32r
, MEMSUFFIX
)((uint64_t)T0
, T1
);
741 /* XXX: those micro-ops need tests ! */
742 /* PowerPC 601 specific instructions (POWER bridge) */
743 void OPPROTO
glue(op_POWER_lscbx
, MEMSUFFIX
) (void)
745 /* When byte count is 0, do nothing */
746 if (likely(T1
!= 0)) {
747 glue(do_POWER_lscbx
, MEMSUFFIX
)(PARAM1
, PARAM2
, PARAM3
);
752 /* POWER2 quad load and store */
753 /* XXX: TAGs are not managed */
754 void OPPROTO
glue(op_POWER2_lfq
, MEMSUFFIX
) (void)
756 glue(do_POWER2_lfq
, MEMSUFFIX
)();
760 void glue(op_POWER2_lfq_le
, MEMSUFFIX
) (void)
762 glue(do_POWER2_lfq_le
, MEMSUFFIX
)();
766 void OPPROTO
glue(op_POWER2_stfq
, MEMSUFFIX
) (void)
768 glue(do_POWER2_stfq
, MEMSUFFIX
)();
772 void OPPROTO
glue(op_POWER2_stfq_le
, MEMSUFFIX
) (void)
774 glue(do_POWER2_stfq_le
, MEMSUFFIX
)();
778 /* Altivec vector extension */
779 #if defined(WORDS_BIGENDIAN)
786 void OPPROTO
glue(op_vr_lvx
, MEMSUFFIX
) (void)
788 AVR0
.u64
[VR_DWORD0
] = glue(ldu64
, MEMSUFFIX
)((uint32_t)T0
);
789 AVR0
.u64
[VR_DWORD1
] = glue(ldu64
, MEMSUFFIX
)((uint32_t)T0
+ 8);
792 void OPPROTO
glue(op_vr_lvx_le
, MEMSUFFIX
) (void)
794 AVR0
.u64
[VR_DWORD1
] = glue(ldu64r
, MEMSUFFIX
)((uint32_t)T0
);
795 AVR0
.u64
[VR_DWORD0
] = glue(ldu64r
, MEMSUFFIX
)((uint32_t)T0
+ 8);
798 void OPPROTO
glue(op_vr_stvx
, MEMSUFFIX
) (void)
800 glue(st64
, MEMSUFFIX
)((uint32_t)T0
, AVR0
.u64
[VR_DWORD0
]);
801 glue(st64
, MEMSUFFIX
)((uint32_t)T0
+ 8, AVR0
.u64
[VR_DWORD1
]);
804 void OPPROTO
glue(op_vr_stvx_le
, MEMSUFFIX
) (void)
806 glue(st64r
, MEMSUFFIX
)((uint32_t)T0
, AVR0
.u64
[VR_DWORD1
]);
807 glue(st64r
, MEMSUFFIX
)((uint32_t)T0
+ 8, AVR0
.u64
[VR_DWORD0
]);
810 #if defined(TARGET_PPC64)
811 void OPPROTO
glue(op_vr_lvx_64
, MEMSUFFIX
) (void)
813 AVR0
.u64
[VR_DWORD0
] = glue(ldu64
, MEMSUFFIX
)((uint64_t)T0
);
814 AVR0
.u64
[VR_DWORD1
] = glue(ldu64
, MEMSUFFIX
)((uint64_t)T0
+ 8);
817 void OPPROTO
glue(op_vr_lvx_le_64
, MEMSUFFIX
) (void)
819 AVR0
.u64
[VR_DWORD1
] = glue(ldu64r
, MEMSUFFIX
)((uint64_t)T0
);
820 AVR0
.u64
[VR_DWORD0
] = glue(ldu64r
, MEMSUFFIX
)((uint64_t)T0
+ 8);
823 void OPPROTO
glue(op_vr_stvx_64
, MEMSUFFIX
) (void)
825 glue(st64
, MEMSUFFIX
)((uint64_t)T0
, AVR0
.u64
[VR_DWORD0
]);
826 glue(st64
, MEMSUFFIX
)((uint64_t)T0
+ 8, AVR0
.u64
[VR_DWORD1
]);
829 void OPPROTO
glue(op_vr_stvx_le_64
, MEMSUFFIX
) (void)
831 glue(st64r
, MEMSUFFIX
)((uint64_t)T0
, AVR0
.u64
[VR_DWORD1
]);
832 glue(st64r
, MEMSUFFIX
)((uint64_t)T0
+ 8, AVR0
.u64
[VR_DWORD0
]);
/* SPE extension: generate load/store micro-ops that move a 64-bit GPR pair
 * (T1_64) through a given helper, with T0 as the effective address. */
#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RET;                                                                      \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RET;                                                                      \
}
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif

#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RET;                                                                      \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RET;                                                                      \
}
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif
883 PPC_SPE_LD_OP(dd
, ldu64
);
884 PPC_SPE_ST_OP(dd
, st64
);
885 PPC_SPE_LD_OP(dd_le
, ldu64r
);
886 PPC_SPE_ST_OP(dd_le
, st64r
);
887 static always_inline
uint64_t glue(spe_ldw
, MEMSUFFIX
) (target_ulong EA
)
890 ret
= (uint64_t)glue(ldu32
, MEMSUFFIX
)(EA
) << 32;
891 ret
|= (uint64_t)glue(ldu32
, MEMSUFFIX
)(EA
+ 4);
894 PPC_SPE_LD_OP(dw
, spe_ldw
);
895 static always_inline
void glue(spe_stdw
, MEMSUFFIX
) (target_ulong EA
,
898 glue(st32
, MEMSUFFIX
)(EA
, data
>> 32);
899 glue(st32
, MEMSUFFIX
)(EA
+ 4, data
);
901 PPC_SPE_ST_OP(dw
, spe_stdw
);
902 static always_inline
uint64_t glue(spe_ldw_le
, MEMSUFFIX
) (target_ulong EA
)
905 ret
= (uint64_t)glue(ldu32r
, MEMSUFFIX
)(EA
) << 32;
906 ret
|= (uint64_t)glue(ldu32r
, MEMSUFFIX
)(EA
+ 4);
909 PPC_SPE_LD_OP(dw_le
, spe_ldw_le
);
910 static always_inline
void glue(spe_stdw_le
, MEMSUFFIX
) (target_ulong EA
,
913 glue(st32r
, MEMSUFFIX
)(EA
, data
>> 32);
914 glue(st32r
, MEMSUFFIX
)(EA
+ 4, data
);
916 PPC_SPE_ST_OP(dw_le
, spe_stdw_le
);
917 static always_inline
uint64_t glue(spe_ldh
, MEMSUFFIX
) (target_ulong EA
)
920 ret
= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
) << 48;
921 ret
|= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
+ 2) << 32;
922 ret
|= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
+ 4) << 16;
923 ret
|= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
+ 6);
926 PPC_SPE_LD_OP(dh
, spe_ldh
);
927 static always_inline
void glue(spe_stdh
, MEMSUFFIX
) (target_ulong EA
,
930 glue(st16
, MEMSUFFIX
)(EA
, data
>> 48);
931 glue(st16
, MEMSUFFIX
)(EA
+ 2, data
>> 32);
932 glue(st16
, MEMSUFFIX
)(EA
+ 4, data
>> 16);
933 glue(st16
, MEMSUFFIX
)(EA
+ 6, data
);
935 PPC_SPE_ST_OP(dh
, spe_stdh
);
936 static always_inline
uint64_t glue(spe_ldh_le
, MEMSUFFIX
) (target_ulong EA
)
939 ret
= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
) << 48;
940 ret
|= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
+ 2) << 32;
941 ret
|= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
+ 4) << 16;
942 ret
|= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
+ 6);
945 PPC_SPE_LD_OP(dh_le
, spe_ldh_le
);
946 static always_inline
void glue(spe_stdh_le
, MEMSUFFIX
) (target_ulong EA
,
949 glue(st16r
, MEMSUFFIX
)(EA
, data
>> 48);
950 glue(st16r
, MEMSUFFIX
)(EA
+ 2, data
>> 32);
951 glue(st16r
, MEMSUFFIX
)(EA
+ 4, data
>> 16);
952 glue(st16r
, MEMSUFFIX
)(EA
+ 6, data
);
954 PPC_SPE_ST_OP(dh_le
, spe_stdh_le
);
955 static always_inline
uint64_t glue(spe_lwhe
, MEMSUFFIX
) (target_ulong EA
)
958 ret
= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
) << 48;
959 ret
|= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
+ 2) << 16;
962 PPC_SPE_LD_OP(whe
, spe_lwhe
);
963 static always_inline
void glue(spe_stwhe
, MEMSUFFIX
) (target_ulong EA
,
966 glue(st16
, MEMSUFFIX
)(EA
, data
>> 48);
967 glue(st16
, MEMSUFFIX
)(EA
+ 2, data
>> 16);
969 PPC_SPE_ST_OP(whe
, spe_stwhe
);
970 static always_inline
uint64_t glue(spe_lwhe_le
, MEMSUFFIX
) (target_ulong EA
)
973 ret
= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
) << 48;
974 ret
|= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
+ 2) << 16;
977 PPC_SPE_LD_OP(whe_le
, spe_lwhe_le
);
978 static always_inline
void glue(spe_stwhe_le
, MEMSUFFIX
) (target_ulong EA
,
981 glue(st16r
, MEMSUFFIX
)(EA
, data
>> 48);
982 glue(st16r
, MEMSUFFIX
)(EA
+ 2, data
>> 16);
984 PPC_SPE_ST_OP(whe_le
, spe_stwhe_le
);
985 static always_inline
uint64_t glue(spe_lwhou
, MEMSUFFIX
) (target_ulong EA
)
988 ret
= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
) << 32;
989 ret
|= (uint64_t)glue(ldu16
, MEMSUFFIX
)(EA
+ 2);
992 PPC_SPE_LD_OP(whou
, spe_lwhou
);
993 static always_inline
uint64_t glue(spe_lwhos
, MEMSUFFIX
) (target_ulong EA
)
996 ret
= ((uint64_t)((int32_t)glue(lds16
, MEMSUFFIX
)(EA
))) << 32;
997 ret
|= (uint64_t)((int32_t)glue(lds16
, MEMSUFFIX
)(EA
+ 2));
1000 PPC_SPE_LD_OP(whos
, spe_lwhos
);
1001 static always_inline
void glue(spe_stwho
, MEMSUFFIX
) (target_ulong EA
,
1004 glue(st16
, MEMSUFFIX
)(EA
, data
>> 32);
1005 glue(st16
, MEMSUFFIX
)(EA
+ 2, data
);
1007 PPC_SPE_ST_OP(who
, spe_stwho
);
1008 static always_inline
uint64_t glue(spe_lwhou_le
, MEMSUFFIX
) (target_ulong EA
)
1011 ret
= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
) << 32;
1012 ret
|= (uint64_t)glue(ldu16r
, MEMSUFFIX
)(EA
+ 2);
1015 PPC_SPE_LD_OP(whou_le
, spe_lwhou_le
);
1016 static always_inline
uint64_t glue(spe_lwhos_le
, MEMSUFFIX
) (target_ulong EA
)
1019 ret
= ((uint64_t)((int32_t)glue(lds16r
, MEMSUFFIX
)(EA
))) << 32;
1020 ret
|= (uint64_t)((int32_t)glue(lds16r
, MEMSUFFIX
)(EA
+ 2));
1023 PPC_SPE_LD_OP(whos_le
, spe_lwhos_le
);
1024 static always_inline
void glue(spe_stwho_le
, MEMSUFFIX
) (target_ulong EA
,
1027 glue(st16r
, MEMSUFFIX
)(EA
, data
>> 32);
1028 glue(st16r
, MEMSUFFIX
)(EA
+ 2, data
);
1030 PPC_SPE_ST_OP(who_le
, spe_stwho_le
);
1031 static always_inline
void glue(spe_stwwo
, MEMSUFFIX
) (target_ulong EA
,
1034 glue(st32
, MEMSUFFIX
)(EA
, data
);
1036 PPC_SPE_ST_OP(wwo
, spe_stwwo
);
1037 static always_inline
void glue(spe_stwwo_le
, MEMSUFFIX
) (target_ulong EA
,
1040 glue(st32r
, MEMSUFFIX
)(EA
, data
);
1042 PPC_SPE_ST_OP(wwo_le
, spe_stwwo_le
);
1043 static always_inline
uint64_t glue(spe_lh
, MEMSUFFIX
) (target_ulong EA
)
1046 tmp
= glue(ldu16
, MEMSUFFIX
)(EA
);
1047 return ((uint64_t)tmp
<< 48) | ((uint64_t)tmp
<< 16);
1049 PPC_SPE_LD_OP(h
, spe_lh
);
1050 static always_inline
uint64_t glue(spe_lh_le
, MEMSUFFIX
) (target_ulong EA
)
1053 tmp
= glue(ldu16r
, MEMSUFFIX
)(EA
);
1054 return ((uint64_t)tmp
<< 48) | ((uint64_t)tmp
<< 16);
1056 PPC_SPE_LD_OP(h_le
, spe_lh_le
);
1057 static always_inline
uint64_t glue(spe_lwwsplat
, MEMSUFFIX
) (target_ulong EA
)
1060 tmp
= glue(ldu32
, MEMSUFFIX
)(EA
);
1061 return ((uint64_t)tmp
<< 32) | (uint64_t)tmp
;
1063 PPC_SPE_LD_OP(wwsplat
, spe_lwwsplat
);
1064 static always_inline
1065 uint64_t glue(spe_lwwsplat_le
, MEMSUFFIX
) (target_ulong EA
)
1068 tmp
= glue(ldu32r
, MEMSUFFIX
)(EA
);
1069 return ((uint64_t)tmp
<< 32) | (uint64_t)tmp
;
1071 PPC_SPE_LD_OP(wwsplat_le
, spe_lwwsplat_le
);
1072 static always_inline
uint64_t glue(spe_lwhsplat
, MEMSUFFIX
) (target_ulong EA
)
1076 tmp
= glue(ldu16
, MEMSUFFIX
)(EA
);
1077 ret
= ((uint64_t)tmp
<< 48) | ((uint64_t)tmp
<< 32);
1078 tmp
= glue(ldu16
, MEMSUFFIX
)(EA
+ 2);
1079 ret
|= ((uint64_t)tmp
<< 16) | (uint64_t)tmp
;
1082 PPC_SPE_LD_OP(whsplat
, spe_lwhsplat
);
1083 static always_inline
1084 uint64_t glue(spe_lwhsplat_le
, MEMSUFFIX
) (target_ulong EA
)
1088 tmp
= glue(ldu16r
, MEMSUFFIX
)(EA
);
1089 ret
= ((uint64_t)tmp
<< 48) | ((uint64_t)tmp
<< 32);
1090 tmp
= glue(ldu16r
, MEMSUFFIX
)(EA
+ 2);
1091 ret
|= ((uint64_t)tmp
<< 16) | (uint64_t)tmp
;
1094 PPC_SPE_LD_OP(whsplat_le
, spe_lwhsplat_le
);