/*
 * PowerPC emulation micro-operations for qemu.
 *
 * Copyright (c) 2003-2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
21 static inline uint16_t glue(ld16r
, MEMSUFFIX
) (target_ulong EA
)
23 uint16_t tmp
= glue(lduw
, MEMSUFFIX
)(EA
);
24 return ((tmp
& 0xFF00) >> 8) | ((tmp
& 0x00FF) << 8);
27 static inline int32_t glue(ld16rs
, MEMSUFFIX
) (target_ulong EA
)
29 int16_t tmp
= glue(lduw
, MEMSUFFIX
)(EA
);
30 return (int16_t)((tmp
& 0xFF00) >> 8) | ((tmp
& 0x00FF) << 8);
33 static inline uint32_t glue(ld32r
, MEMSUFFIX
) (target_ulong EA
)
35 uint32_t tmp
= glue(ldl
, MEMSUFFIX
)(EA
);
36 return ((tmp
& 0xFF000000) >> 24) | ((tmp
& 0x00FF0000) >> 8) |
37 ((tmp
& 0x0000FF00) << 8) | ((tmp
& 0x000000FF) << 24);
40 #if defined(TARGET_PPC64) || defined(TARGET_PPCSPE)
41 static inline uint64_t glue(ld64r
, MEMSUFFIX
) (target_ulong EA
)
43 uint64_t tmp
= glue(ldq
, MEMSUFFIX
)(EA
);
44 return ((tmp
& 0xFF00000000000000ULL
) >> 56) |
45 ((tmp
& 0x00FF000000000000ULL
) >> 40) |
46 ((tmp
& 0x0000FF0000000000ULL
) >> 24) |
47 ((tmp
& 0x000000FF00000000ULL
) >> 8) |
48 ((tmp
& 0x00000000FF000000ULL
) << 8) |
49 ((tmp
& 0x0000000000FF0000ULL
) << 24) |
50 ((tmp
& 0x000000000000FF00ULL
) << 40) |
51 ((tmp
& 0x00000000000000FFULL
) << 54);
55 #if defined(TARGET_PPC64)
56 static inline int64_t glue(ldsl
, MEMSUFFIX
) (target_ulong EA
)
58 return (int32_t)glue(ldl
, MEMSUFFIX
)(EA
);
61 static inline int64_t glue(ld32rs
, MEMSUFFIX
) (target_ulong EA
)
63 uint32_t tmp
= glue(ldl
, MEMSUFFIX
)(EA
);
64 return (int32_t)((tmp
& 0xFF000000) >> 24) | ((tmp
& 0x00FF0000) >> 8) |
65 ((tmp
& 0x0000FF00) << 8) | ((tmp
& 0x000000FF) << 24);
69 static inline void glue(st16r
, MEMSUFFIX
) (target_ulong EA
, uint16_t data
)
71 uint16_t tmp
= ((data
& 0xFF00) >> 8) | ((data
& 0x00FF) << 8);
72 glue(stw
, MEMSUFFIX
)(EA
, tmp
);
75 static inline void glue(st32r
, MEMSUFFIX
) (target_ulong EA
, uint32_t data
)
77 uint32_t tmp
= ((data
& 0xFF000000) >> 24) | ((data
& 0x00FF0000) >> 8) |
78 ((data
& 0x0000FF00) << 8) | ((data
& 0x000000FF) << 24);
79 glue(stl
, MEMSUFFIX
)(EA
, tmp
);
82 #if defined(TARGET_PPC64) || defined(TARGET_PPCSPE)
83 static inline void glue(st64r
, MEMSUFFIX
) (target_ulong EA
, uint64_t data
)
85 uint64_t tmp
= ((data
& 0xFF00000000000000ULL
) >> 56) |
86 ((data
& 0x00FF000000000000ULL
) >> 40) |
87 ((data
& 0x0000FF0000000000ULL
) >> 24) |
88 ((data
& 0x000000FF00000000ULL
) >> 8) |
89 ((data
& 0x00000000FF000000ULL
) << 8) |
90 ((data
& 0x0000000000FF0000ULL
) << 24) |
91 ((data
& 0x000000000000FF00ULL
) << 40) |
92 ((data
& 0x00000000000000FFULL
) << 56);
93 glue(stq
, MEMSUFFIX
)(EA
, tmp
);
97 /*** Integer load ***/
98 #define PPC_LD_OP(name, op) \
99 void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void) \
101 T1 = glue(op, MEMSUFFIX)((uint32_t)T0); \
105 #if defined(TARGET_PPC64)
106 #define PPC_LD_OP_64(name, op) \
107 void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \
109 T1 = glue(op, MEMSUFFIX)((uint64_t)T0); \
114 #define PPC_ST_OP(name, op) \
115 void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void) \
117 glue(op, MEMSUFFIX)((uint32_t)T0, T1); \
121 #if defined(TARGET_PPC64)
122 #define PPC_ST_OP_64(name, op) \
123 void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void) \
125 glue(op, MEMSUFFIX)((uint64_t)T0, T1); \
134 #if defined(TARGET_PPC64)
137 PPC_LD_OP_64(d
, ldq
);
138 PPC_LD_OP_64(wa
, ldsl
);
139 PPC_LD_OP_64(bz
, ldub
);
140 PPC_LD_OP_64(ha
, ldsw
);
141 PPC_LD_OP_64(hz
, lduw
);
142 PPC_LD_OP_64(wz
, ldl
);
145 PPC_LD_OP(ha_le
, ld16rs
);
146 PPC_LD_OP(hz_le
, ld16r
);
147 PPC_LD_OP(wz_le
, ld32r
);
148 #if defined(TARGET_PPC64)
149 PPC_LD_OP(d_le
, ld64r
);
150 PPC_LD_OP(wa_le
, ld32rs
);
151 PPC_LD_OP_64(d_le
, ld64r
);
152 PPC_LD_OP_64(wa_le
, ld32rs
);
153 PPC_LD_OP_64(ha_le
, ld16rs
);
154 PPC_LD_OP_64(hz_le
, ld16r
);
155 PPC_LD_OP_64(wz_le
, ld32r
);
158 /*** Integer store ***/
162 #if defined(TARGET_PPC64)
164 PPC_ST_OP_64(d
, stq
);
165 PPC_ST_OP_64(b
, stb
);
166 PPC_ST_OP_64(h
, stw
);
167 PPC_ST_OP_64(w
, stl
);
170 PPC_ST_OP(h_le
, st16r
);
171 PPC_ST_OP(w_le
, st32r
);
172 #if defined(TARGET_PPC64)
173 PPC_ST_OP(d_le
, st64r
);
174 PPC_ST_OP_64(d_le
, st64r
);
175 PPC_ST_OP_64(h_le
, st16r
);
176 PPC_ST_OP_64(w_le
, st32r
);
179 /*** Integer load and store with byte reverse ***/
180 PPC_LD_OP(hbr
, ld16r
);
181 PPC_LD_OP(wbr
, ld32r
);
182 PPC_ST_OP(hbr
, st16r
);
183 PPC_ST_OP(wbr
, st32r
);
184 #if defined(TARGET_PPC64)
185 PPC_LD_OP_64(hbr
, ld16r
);
186 PPC_LD_OP_64(wbr
, ld32r
);
187 PPC_ST_OP_64(hbr
, st16r
);
188 PPC_ST_OP_64(wbr
, st32r
);
191 PPC_LD_OP(hbr_le
, lduw
);
192 PPC_LD_OP(wbr_le
, ldl
);
193 PPC_ST_OP(hbr_le
, stw
);
194 PPC_ST_OP(wbr_le
, stl
);
195 #if defined(TARGET_PPC64)
196 PPC_LD_OP_64(hbr_le
, lduw
);
197 PPC_LD_OP_64(wbr_le
, ldl
);
198 PPC_ST_OP_64(hbr_le
, stw
);
199 PPC_ST_OP_64(wbr_le
, stl
);
202 /*** Integer load and store multiple ***/
203 void OPPROTO
glue(op_lmw
, MEMSUFFIX
) (void)
205 glue(do_lmw
, MEMSUFFIX
)(PARAM1
);
209 #if defined(TARGET_PPC64)
210 void OPPROTO
glue(op_lmw_64
, MEMSUFFIX
) (void)
212 glue(do_lmw_64
, MEMSUFFIX
)(PARAM1
);
217 void OPPROTO
glue(op_lmw_le
, MEMSUFFIX
) (void)
219 glue(do_lmw_le
, MEMSUFFIX
)(PARAM1
);
223 #if defined(TARGET_PPC64)
224 void OPPROTO
glue(op_lmw_le_64
, MEMSUFFIX
) (void)
226 glue(do_lmw_le_64
, MEMSUFFIX
)(PARAM1
);
231 void OPPROTO
glue(op_stmw
, MEMSUFFIX
) (void)
233 glue(do_stmw
, MEMSUFFIX
)(PARAM1
);
237 #if defined(TARGET_PPC64)
238 void OPPROTO
glue(op_stmw_64
, MEMSUFFIX
) (void)
240 glue(do_stmw_64
, MEMSUFFIX
)(PARAM1
);
245 void OPPROTO
glue(op_stmw_le
, MEMSUFFIX
) (void)
247 glue(do_stmw_le
, MEMSUFFIX
)(PARAM1
);
251 #if defined(TARGET_PPC64)
252 void OPPROTO
glue(op_stmw_le_64
, MEMSUFFIX
) (void)
254 glue(do_stmw_le_64
, MEMSUFFIX
)(PARAM1
);
259 /*** Integer load and store strings ***/
260 void OPPROTO
glue(op_lswi
, MEMSUFFIX
) (void)
262 glue(do_lsw
, MEMSUFFIX
)(PARAM1
);
266 #if defined(TARGET_PPC64)
267 void OPPROTO
glue(op_lswi_64
, MEMSUFFIX
) (void)
269 glue(do_lsw_64
, MEMSUFFIX
)(PARAM1
);
274 void OPPROTO
glue(op_lswi_le
, MEMSUFFIX
) (void)
276 glue(do_lsw_le
, MEMSUFFIX
)(PARAM1
);
280 #if defined(TARGET_PPC64)
281 void OPPROTO
glue(op_lswi_le_64
, MEMSUFFIX
) (void)
283 glue(do_lsw_le_64
, MEMSUFFIX
)(PARAM1
);
288 /* PPC32 specification says we must generate an exception if
289 * rA is in the range of registers to be loaded.
290 * In an other hand, IBM says this is valid, but rA won't be loaded.
291 * For now, I'll follow the spec...
293 void OPPROTO
glue(op_lswx
, MEMSUFFIX
) (void)
295 /* Note: T1 comes from xer_bc then no cast is needed */
296 if (likely(T1
!= 0)) {
297 if (unlikely((PARAM1
< PARAM2
&& (PARAM1
+ T1
) > PARAM2
) ||
298 (PARAM1
< PARAM3
&& (PARAM1
+ T1
) > PARAM3
))) {
299 do_raise_exception_err(EXCP_PROGRAM
, EXCP_INVAL
| EXCP_INVAL_LSWX
);
301 glue(do_lsw
, MEMSUFFIX
)(PARAM1
);
307 #if defined(TARGET_PPC64)
308 void OPPROTO
glue(op_lswx_64
, MEMSUFFIX
) (void)
310 /* Note: T1 comes from xer_bc then no cast is needed */
311 if (likely(T1
!= 0)) {
312 if (unlikely((PARAM1
< PARAM2
&& (PARAM1
+ T1
) > PARAM2
) ||
313 (PARAM1
< PARAM3
&& (PARAM1
+ T1
) > PARAM3
))) {
314 do_raise_exception_err(EXCP_PROGRAM
, EXCP_INVAL
| EXCP_INVAL_LSWX
);
316 glue(do_lsw_64
, MEMSUFFIX
)(PARAM1
);
323 void OPPROTO
glue(op_lswx_le
, MEMSUFFIX
) (void)
325 /* Note: T1 comes from xer_bc then no cast is needed */
326 if (likely(T1
!= 0)) {
327 if (unlikely((PARAM1
< PARAM2
&& (PARAM1
+ T1
) > PARAM2
) ||
328 (PARAM1
< PARAM3
&& (PARAM1
+ T1
) > PARAM3
))) {
329 do_raise_exception_err(EXCP_PROGRAM
, EXCP_INVAL
| EXCP_INVAL_LSWX
);
331 glue(do_lsw_le
, MEMSUFFIX
)(PARAM1
);
337 #if defined(TARGET_PPC64)
338 void OPPROTO
glue(op_lswx_le_64
, MEMSUFFIX
) (void)
340 /* Note: T1 comes from xer_bc then no cast is needed */
341 if (likely(T1
!= 0)) {
342 if (unlikely((PARAM1
< PARAM2
&& (PARAM1
+ T1
) > PARAM2
) ||
343 (PARAM1
< PARAM3
&& (PARAM1
+ T1
) > PARAM3
))) {
344 do_raise_exception_err(EXCP_PROGRAM
, EXCP_INVAL
| EXCP_INVAL_LSWX
);
346 glue(do_lsw_le_64
, MEMSUFFIX
)(PARAM1
);
353 void OPPROTO
glue(op_stsw
, MEMSUFFIX
) (void)
355 glue(do_stsw
, MEMSUFFIX
)(PARAM1
);
359 #if defined(TARGET_PPC64)
360 void OPPROTO
glue(op_stsw_64
, MEMSUFFIX
) (void)
362 glue(do_stsw_64
, MEMSUFFIX
)(PARAM1
);
367 void OPPROTO
glue(op_stsw_le
, MEMSUFFIX
) (void)
369 glue(do_stsw_le
, MEMSUFFIX
)(PARAM1
);
373 #if defined(TARGET_PPC64)
374 void OPPROTO
glue(op_stsw_le_64
, MEMSUFFIX
) (void)
376 glue(do_stsw_le_64
, MEMSUFFIX
)(PARAM1
);
381 /*** Floating-point store ***/
382 #define PPC_STF_OP(name, op) \
383 void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void) \
385 glue(op, MEMSUFFIX)((uint32_t)T0, FT0); \
389 #if defined(TARGET_PPC64)
390 #define PPC_STF_OP_64(name, op) \
391 void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void) \
393 glue(op, MEMSUFFIX)((uint64_t)T0, FT0); \
398 PPC_STF_OP(fd
, stfq
);
399 PPC_STF_OP(fs
, stfl
);
400 #if defined(TARGET_PPC64)
401 PPC_STF_OP_64(fd
, stfq
);
402 PPC_STF_OP_64(fs
, stfl
);
405 static inline void glue(stfqr
, MEMSUFFIX
) (target_ulong EA
, double d
)
413 u
.u
= ((u
.u
& 0xFF00000000000000ULL
) >> 56) |
414 ((u
.u
& 0x00FF000000000000ULL
) >> 40) |
415 ((u
.u
& 0x0000FF0000000000ULL
) >> 24) |
416 ((u
.u
& 0x000000FF00000000ULL
) >> 8) |
417 ((u
.u
& 0x00000000FF000000ULL
) << 8) |
418 ((u
.u
& 0x0000000000FF0000ULL
) << 24) |
419 ((u
.u
& 0x000000000000FF00ULL
) << 40) |
420 ((u
.u
& 0x00000000000000FFULL
) << 56);
421 glue(stfq
, MEMSUFFIX
)(EA
, u
.d
);
424 static inline void glue(stflr
, MEMSUFFIX
) (target_ulong EA
, float f
)
432 u
.u
= ((u
.u
& 0xFF000000UL
) >> 24) |
433 ((u
.u
& 0x00FF0000ULL
) >> 8) |
434 ((u
.u
& 0x0000FF00UL
) << 8) |
435 ((u
.u
& 0x000000FFULL
) << 24);
436 glue(stfl
, MEMSUFFIX
)(EA
, u
.f
);
439 PPC_STF_OP(fd_le
, stfqr
);
440 PPC_STF_OP(fs_le
, stflr
);
441 #if defined(TARGET_PPC64)
442 PPC_STF_OP_64(fd_le
, stfqr
);
443 PPC_STF_OP_64(fs_le
, stflr
);
446 /*** Floating-point load ***/
447 #define PPC_LDF_OP(name, op) \
448 void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void) \
450 FT0 = glue(op, MEMSUFFIX)((uint32_t)T0); \
454 #if defined(TARGET_PPC64)
455 #define PPC_LDF_OP_64(name, op) \
456 void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \
458 FT0 = glue(op, MEMSUFFIX)((uint64_t)T0); \
463 PPC_LDF_OP(fd
, ldfq
);
464 PPC_LDF_OP(fs
, ldfl
);
465 #if defined(TARGET_PPC64)
466 PPC_LDF_OP_64(fd
, ldfq
);
467 PPC_LDF_OP_64(fs
, ldfl
);
470 static inline double glue(ldfqr
, MEMSUFFIX
) (target_ulong EA
)
477 u
.d
= glue(ldfq
, MEMSUFFIX
)(EA
);
478 u
.u
= ((u
.u
& 0xFF00000000000000ULL
) >> 56) |
479 ((u
.u
& 0x00FF000000000000ULL
) >> 40) |
480 ((u
.u
& 0x0000FF0000000000ULL
) >> 24) |
481 ((u
.u
& 0x000000FF00000000ULL
) >> 8) |
482 ((u
.u
& 0x00000000FF000000ULL
) << 8) |
483 ((u
.u
& 0x0000000000FF0000ULL
) << 24) |
484 ((u
.u
& 0x000000000000FF00ULL
) << 40) |
485 ((u
.u
& 0x00000000000000FFULL
) << 56);
490 static inline float glue(ldflr
, MEMSUFFIX
) (target_ulong EA
)
497 u
.f
= glue(ldfl
, MEMSUFFIX
)(EA
);
498 u
.u
= ((u
.u
& 0xFF000000UL
) >> 24) |
499 ((u
.u
& 0x00FF0000ULL
) >> 8) |
500 ((u
.u
& 0x0000FF00UL
) << 8) |
501 ((u
.u
& 0x000000FFULL
) << 24);
506 PPC_LDF_OP(fd_le
, ldfqr
);
507 PPC_LDF_OP(fs_le
, ldflr
);
508 #if defined(TARGET_PPC64)
509 PPC_LDF_OP_64(fd_le
, ldfqr
);
510 PPC_LDF_OP_64(fs_le
, ldflr
);
513 /* Load and set reservation */
514 void OPPROTO
glue(op_lwarx
, MEMSUFFIX
) (void)
516 if (unlikely(T0
& 0x03)) {
517 do_raise_exception(EXCP_ALIGN
);
519 T1
= glue(ldl
, MEMSUFFIX
)((uint32_t)T0
);
520 regs
->reserve
= (uint32_t)T0
;
525 #if defined(TARGET_PPC64)
526 void OPPROTO
glue(op_lwarx_64
, MEMSUFFIX
) (void)
528 if (unlikely(T0
& 0x03)) {
529 do_raise_exception(EXCP_ALIGN
);
531 T1
= glue(ldl
, MEMSUFFIX
)((uint64_t)T0
);
532 regs
->reserve
= (uint64_t)T0
;
537 void OPPROTO
glue(op_ldarx
, MEMSUFFIX
) (void)
539 if (unlikely(T0
& 0x03)) {
540 do_raise_exception(EXCP_ALIGN
);
542 T1
= glue(ldq
, MEMSUFFIX
)((uint32_t)T0
);
543 regs
->reserve
= (uint32_t)T0
;
548 void OPPROTO
glue(op_ldarx_64
, MEMSUFFIX
) (void)
550 if (unlikely(T0
& 0x03)) {
551 do_raise_exception(EXCP_ALIGN
);
553 T1
= glue(ldq
, MEMSUFFIX
)((uint64_t)T0
);
554 regs
->reserve
= (uint64_t)T0
;
560 void OPPROTO
glue(op_lwarx_le
, MEMSUFFIX
) (void)
562 if (unlikely(T0
& 0x03)) {
563 do_raise_exception(EXCP_ALIGN
);
565 T1
= glue(ld32r
, MEMSUFFIX
)((uint32_t)T0
);
566 regs
->reserve
= (uint32_t)T0
;
571 #if defined(TARGET_PPC64)
572 void OPPROTO
glue(op_lwarx_le_64
, MEMSUFFIX
) (void)
574 if (unlikely(T0
& 0x03)) {
575 do_raise_exception(EXCP_ALIGN
);
577 T1
= glue(ld32r
, MEMSUFFIX
)((uint64_t)T0
);
578 regs
->reserve
= (uint64_t)T0
;
583 void OPPROTO
glue(op_ldarx_le
, MEMSUFFIX
) (void)
585 if (unlikely(T0
& 0x03)) {
586 do_raise_exception(EXCP_ALIGN
);
588 T1
= glue(ld64r
, MEMSUFFIX
)((uint32_t)T0
);
589 regs
->reserve
= (uint32_t)T0
;
594 void OPPROTO
glue(op_ldarx_le_64
, MEMSUFFIX
) (void)
596 if (unlikely(T0
& 0x03)) {
597 do_raise_exception(EXCP_ALIGN
);
599 T1
= glue(ld64r
, MEMSUFFIX
)((uint64_t)T0
);
600 regs
->reserve
= (uint64_t)T0
;
606 /* Store with reservation */
607 void OPPROTO
glue(op_stwcx
, MEMSUFFIX
) (void)
609 if (unlikely(T0
& 0x03)) {
610 do_raise_exception(EXCP_ALIGN
);
612 if (unlikely(regs
->reserve
!= (uint32_t)T0
)) {
613 env
->crf
[0] = xer_ov
;
615 glue(stl
, MEMSUFFIX
)((uint32_t)T0
, T1
);
616 env
->crf
[0] = xer_ov
| 0x02;
623 #if defined(TARGET_PPC64)
624 void OPPROTO
glue(op_stwcx_64
, MEMSUFFIX
) (void)
626 if (unlikely(T0
& 0x03)) {
627 do_raise_exception(EXCP_ALIGN
);
629 if (unlikely(regs
->reserve
!= (uint64_t)T0
)) {
630 env
->crf
[0] = xer_ov
;
632 glue(stl
, MEMSUFFIX
)((uint64_t)T0
, T1
);
633 env
->crf
[0] = xer_ov
| 0x02;
640 void OPPROTO
glue(op_stdcx
, MEMSUFFIX
) (void)
642 if (unlikely(T0
& 0x03)) {
643 do_raise_exception(EXCP_ALIGN
);
645 if (unlikely(regs
->reserve
!= (uint32_t)T0
)) {
646 env
->crf
[0] = xer_ov
;
648 glue(stq
, MEMSUFFIX
)((uint32_t)T0
, T1
);
649 env
->crf
[0] = xer_ov
| 0x02;
656 void OPPROTO
glue(op_stdcx_64
, MEMSUFFIX
) (void)
658 if (unlikely(T0
& 0x03)) {
659 do_raise_exception(EXCP_ALIGN
);
661 if (unlikely(regs
->reserve
!= (uint64_t)T0
)) {
662 env
->crf
[0] = xer_ov
;
664 glue(stq
, MEMSUFFIX
)((uint64_t)T0
, T1
);
665 env
->crf
[0] = xer_ov
| 0x02;
673 void OPPROTO
glue(op_stwcx_le
, MEMSUFFIX
) (void)
675 if (unlikely(T0
& 0x03)) {
676 do_raise_exception(EXCP_ALIGN
);
678 if (unlikely(regs
->reserve
!= (uint32_t)T0
)) {
679 env
->crf
[0] = xer_ov
;
681 glue(st32r
, MEMSUFFIX
)((uint32_t)T0
, T1
);
682 env
->crf
[0] = xer_ov
| 0x02;
689 #if defined(TARGET_PPC64)
690 void OPPROTO
glue(op_stwcx_le_64
, MEMSUFFIX
) (void)
692 if (unlikely(T0
& 0x03)) {
693 do_raise_exception(EXCP_ALIGN
);
695 if (unlikely(regs
->reserve
!= (uint64_t)T0
)) {
696 env
->crf
[0] = xer_ov
;
698 glue(st32r
, MEMSUFFIX
)((uint64_t)T0
, T1
);
699 env
->crf
[0] = xer_ov
| 0x02;
706 void OPPROTO
glue(op_stdcx_le
, MEMSUFFIX
) (void)
708 if (unlikely(T0
& 0x03)) {
709 do_raise_exception(EXCP_ALIGN
);
711 if (unlikely(regs
->reserve
!= (uint32_t)T0
)) {
712 env
->crf
[0] = xer_ov
;
714 glue(st64r
, MEMSUFFIX
)((uint32_t)T0
, T1
);
715 env
->crf
[0] = xer_ov
| 0x02;
722 void OPPROTO
glue(op_stdcx_le_64
, MEMSUFFIX
) (void)
724 if (unlikely(T0
& 0x03)) {
725 do_raise_exception(EXCP_ALIGN
);
727 if (unlikely(regs
->reserve
!= (uint64_t)T0
)) {
728 env
->crf
[0] = xer_ov
;
730 glue(st64r
, MEMSUFFIX
)((uint64_t)T0
, T1
);
731 env
->crf
[0] = xer_ov
| 0x02;
739 void OPPROTO
glue(op_dcbz
, MEMSUFFIX
) (void)
741 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x00), 0);
742 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x04), 0);
743 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x08), 0);
744 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x0C), 0);
745 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x10), 0);
746 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x14), 0);
747 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x18), 0);
748 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x1C), 0);
749 #if DCACHE_LINE_SIZE == 64
750 /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
751 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x20UL
), 0);
752 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x24UL
), 0);
753 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x28UL
), 0);
754 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x2CUL
), 0);
755 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x30UL
), 0);
756 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x34UL
), 0);
757 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x38UL
), 0);
758 glue(stl
, MEMSUFFIX
)((uint32_t)(T0
+ 0x3CUL
), 0);
763 #if defined(TARGET_PPC64)
764 void OPPROTO
glue(op_dcbz_64
, MEMSUFFIX
) (void)
766 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x00), 0);
767 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x04), 0);
768 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x08), 0);
769 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x0C), 0);
770 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x10), 0);
771 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x14), 0);
772 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x18), 0);
773 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x1C), 0);
774 #if DCACHE_LINE_SIZE == 64
775 /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
776 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x20UL
), 0);
777 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x24UL
), 0);
778 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x28UL
), 0);
779 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x2CUL
), 0);
780 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x30UL
), 0);
781 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x34UL
), 0);
782 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x38UL
), 0);
783 glue(stl
, MEMSUFFIX
)((uint64_t)(T0
+ 0x3CUL
), 0);
789 /* Instruction cache block invalidate */
790 void OPPROTO
glue(op_icbi
, MEMSUFFIX
) (void)
792 glue(do_icbi
, MEMSUFFIX
)();
796 #if defined(TARGET_PPC64)
797 void OPPROTO
glue(op_icbi_64
, MEMSUFFIX
) (void)
799 glue(do_icbi_64
, MEMSUFFIX
)();
804 /* External access */
805 void OPPROTO
glue(op_eciwx
, MEMSUFFIX
) (void)
807 T1
= glue(ldl
, MEMSUFFIX
)((uint32_t)T0
);
811 #if defined(TARGET_PPC64)
812 void OPPROTO
glue(op_eciwx_64
, MEMSUFFIX
) (void)
814 T1
= glue(ldl
, MEMSUFFIX
)((uint64_t)T0
);
819 void OPPROTO
glue(op_ecowx
, MEMSUFFIX
) (void)
821 glue(stl
, MEMSUFFIX
)((uint32_t)T0
, T1
);
825 #if defined(TARGET_PPC64)
826 void OPPROTO
glue(op_ecowx_64
, MEMSUFFIX
) (void)
828 glue(stl
, MEMSUFFIX
)((uint64_t)T0
, T1
);
833 void OPPROTO
glue(op_eciwx_le
, MEMSUFFIX
) (void)
835 T1
= glue(ld32r
, MEMSUFFIX
)((uint32_t)T0
);
839 #if defined(TARGET_PPC64)
840 void OPPROTO
glue(op_eciwx_le_64
, MEMSUFFIX
) (void)
842 T1
= glue(ld32r
, MEMSUFFIX
)((uint64_t)T0
);
847 void OPPROTO
glue(op_ecowx_le
, MEMSUFFIX
) (void)
849 glue(st32r
, MEMSUFFIX
)((uint32_t)T0
, T1
);
853 #if defined(TARGET_PPC64)
854 void OPPROTO
glue(op_ecowx_le_64
, MEMSUFFIX
) (void)
856 glue(st32r
, MEMSUFFIX
)((uint64_t)T0
, T1
);
861 /* XXX: those micro-ops need tests ! */
862 /* PowerPC 601 specific instructions (POWER bridge) */
863 void OPPROTO
glue(op_POWER_lscbx
, MEMSUFFIX
) (void)
865 /* When byte count is 0, do nothing */
866 if (likely(T1
!= 0)) {
867 glue(do_POWER_lscbx
, MEMSUFFIX
)(PARAM1
, PARAM2
, PARAM3
);
872 /* POWER2 quad load and store */
873 /* XXX: TAGs are not managed */
874 void OPPROTO
glue(op_POWER2_lfq
, MEMSUFFIX
) (void)
876 glue(do_POWER2_lfq
, MEMSUFFIX
)();
880 void glue(op_POWER2_lfq_le
, MEMSUFFIX
) (void)
882 glue(do_POWER2_lfq_le
, MEMSUFFIX
)();
886 void OPPROTO
glue(op_POWER2_stfq
, MEMSUFFIX
) (void)
888 glue(do_POWER2_stfq
, MEMSUFFIX
)();
892 void OPPROTO
glue(op_POWER2_stfq_le
, MEMSUFFIX
) (void)
894 glue(do_POWER2_stfq_le
, MEMSUFFIX
)();
898 #if defined(TARGET_PPCSPE)
900 #define _PPC_SPE_LD_OP(name, op) \
901 void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void) \
903 T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0); \
907 #if defined(TARGET_PPC64)
908 #define _PPC_SPE_LD_OP_64(name, op) \
909 void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void) \
911 T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0); \
914 #define PPC_SPE_LD_OP(name, op) \
915 _PPC_SPE_LD_OP(name, op); \
916 _PPC_SPE_LD_OP_64(name, op)
918 #define PPC_SPE_LD_OP(name, op) \
919 _PPC_SPE_LD_OP(name, op)
923 #define _PPC_SPE_ST_OP(name, op) \
924 void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void) \
926 glue(op, MEMSUFFIX)((uint32_t)T0, T1_64); \
930 #if defined(TARGET_PPC64)
931 #define _PPC_SPE_ST_OP_64(name, op) \
932 void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void) \
934 glue(op, MEMSUFFIX)((uint64_t)T0, T1_64); \
937 #define PPC_SPE_ST_OP(name, op) \
938 _PPC_SPE_ST_OP(name, op); \
939 _PPC_SPE_ST_OP_64(name, op)
941 #define PPC_SPE_ST_OP(name, op) \
942 _PPC_SPE_ST_OP(name, op)
945 #if !defined(TARGET_PPC64)
946 PPC_SPE_LD_OP(dd
, ldq
);
947 PPC_SPE_ST_OP(dd
, stq
);
948 PPC_SPE_LD_OP(dd_le
, ld64r
);
949 PPC_SPE_ST_OP(dd_le
, st64r
);
951 static inline uint64_t glue(spe_ldw
, MEMSUFFIX
) (target_ulong EA
)
954 ret
= (uint64_t)glue(ldl
, MEMSUFFIX
)(EA
) << 32;
955 ret
|= (uint64_t)glue(ldl
, MEMSUFFIX
)(EA
+ 4);
958 PPC_SPE_LD_OP(dw
, spe_ldw
);
959 static inline void glue(spe_stdw
, MEMSUFFIX
) (target_ulong EA
, uint64_t data
)
961 glue(stl
, MEMSUFFIX
)(EA
, data
>> 32);
962 glue(stl
, MEMSUFFIX
)(EA
+ 4, data
);
964 PPC_SPE_ST_OP(dw
, spe_stdw
);
965 static inline uint64_t glue(spe_ldw_le
, MEMSUFFIX
) (target_ulong EA
)
968 ret
= (uint64_t)glue(ld32r
, MEMSUFFIX
)(EA
) << 32;
969 ret
|= (uint64_t)glue(ld32r
, MEMSUFFIX
)(EA
+ 4);
972 PPC_SPE_LD_OP(dw_le
, spe_ldw_le
);
973 static inline void glue(spe_stdw_le
, MEMSUFFIX
) (target_ulong EA
,
976 glue(st32r
, MEMSUFFIX
)(EA
, data
>> 32);
977 glue(st32r
, MEMSUFFIX
)(EA
+ 4, data
);
979 PPC_SPE_ST_OP(dw_le
, spe_stdw_le
);
980 static inline uint64_t glue(spe_ldh
, MEMSUFFIX
) (target_ulong EA
)
983 ret
= (uint64_t)glue(lduw
, MEMSUFFIX
)(EA
) << 48;
984 ret
|= (uint64_t)glue(lduw
, MEMSUFFIX
)(EA
+ 2) << 32;
985 ret
|= (uint64_t)glue(lduw
, MEMSUFFIX
)(EA
+ 4) << 16;
986 ret
|= (uint64_t)glue(lduw
, MEMSUFFIX
)(EA
+ 6);
989 PPC_SPE_LD_OP(dh
, spe_ldh
);
990 static inline void glue(spe_stdh
, MEMSUFFIX
) (target_ulong EA
, uint64_t data
)
992 glue(stw
, MEMSUFFIX
)(EA
, data
>> 48);
993 glue(stw
, MEMSUFFIX
)(EA
+ 2, data
>> 32);
994 glue(stw
, MEMSUFFIX
)(EA
+ 4, data
>> 16);
995 glue(stw
, MEMSUFFIX
)(EA
+ 6, data
);
997 PPC_SPE_ST_OP(dh
, spe_stdh
);
998 static inline uint64_t glue(spe_ldh_le
, MEMSUFFIX
) (target_ulong EA
)
1001 ret
= (uint64_t)glue(ld16r
, MEMSUFFIX
)(EA
) << 48;
1002 ret
|= (uint64_t)glue(ld16r
, MEMSUFFIX
)(EA
+ 2) << 32;
1003 ret
|= (uint64_t)glue(ld16r
, MEMSUFFIX
)(EA
+ 4) << 16;
1004 ret
|= (uint64_t)glue(ld16r
, MEMSUFFIX
)(EA
+ 6);
1007 PPC_SPE_LD_OP(dh_le
, spe_ldh_le
);
1008 static inline void glue(spe_stdh_le
, MEMSUFFIX
) (target_ulong EA
,
1011 glue(st16r
, MEMSUFFIX
)(EA
, data
>> 48);
1012 glue(st16r
, MEMSUFFIX
)(EA
+ 2, data
>> 32);
1013 glue(st16r
, MEMSUFFIX
)(EA
+ 4, data
>> 16);
1014 glue(st16r
, MEMSUFFIX
)(EA
+ 6, data
);
1016 PPC_SPE_ST_OP(dh_le
, spe_stdh_le
);
1017 static inline uint64_t glue(spe_lwhe
, MEMSUFFIX
) (target_ulong EA
)
1020 ret
= (uint64_t)glue(lduw
, MEMSUFFIX
)(EA
) << 48;
1021 ret
|= (uint64_t)glue(lduw
, MEMSUFFIX
)(EA
+ 2) << 16;
1024 PPC_SPE_LD_OP(whe
, spe_lwhe
);
1025 static inline void glue(spe_stwhe
, MEMSUFFIX
) (target_ulong EA
, uint64_t data
)
1027 glue(stw
, MEMSUFFIX
)(EA
, data
>> 48);
1028 glue(stw
, MEMSUFFIX
)(EA
+ 2, data
>> 16);
1030 PPC_SPE_ST_OP(whe
, spe_stwhe
);
1031 static inline uint64_t glue(spe_lwhe_le
, MEMSUFFIX
) (target_ulong EA
)
1034 ret
= (uint64_t)glue(ld16r
, MEMSUFFIX
)(EA
) << 48;
1035 ret
|= (uint64_t)glue(ld16r
, MEMSUFFIX
)(EA
+ 2) << 16;
1038 PPC_SPE_LD_OP(whe_le
, spe_lwhe_le
);
1039 static inline void glue(spe_stwhe_le
, MEMSUFFIX
) (target_ulong EA
,
1042 glue(st16r
, MEMSUFFIX
)(EA
, data
>> 48);
1043 glue(st16r
, MEMSUFFIX
)(EA
+ 2, data
>> 16);
1045 PPC_SPE_ST_OP(whe_le
, spe_stwhe_le
);
1046 static inline uint64_t glue(spe_lwhou
, MEMSUFFIX
) (target_ulong EA
)
1049 ret
= (uint64_t)glue(lduw
, MEMSUFFIX
)(EA
) << 32;
1050 ret
|= (uint64_t)glue(lduw
, MEMSUFFIX
)(EA
+ 2);
1053 PPC_SPE_LD_OP(whou
, spe_lwhou
);
1054 static inline uint64_t glue(spe_lwhos
, MEMSUFFIX
) (target_ulong EA
)
1057 ret
= ((uint64_t)((int32_t)glue(ldsw
, MEMSUFFIX
)(EA
))) << 32;
1058 ret
|= (uint64_t)((int32_t)glue(ldsw
, MEMSUFFIX
)(EA
+ 2));
1061 PPC_SPE_LD_OP(whos
, spe_lwhos
);
1062 static inline void glue(spe_stwho
, MEMSUFFIX
) (target_ulong EA
, uint64_t data
)
1064 glue(stw
, MEMSUFFIX
)(EA
, data
>> 32);
1065 glue(stw
, MEMSUFFIX
)(EA
+ 2, data
);
1067 PPC_SPE_ST_OP(who
, spe_stwho
);
1068 static inline uint64_t glue(spe_lwhou_le
, MEMSUFFIX
) (target_ulong EA
)
1071 ret
= (uint64_t)glue(ld16r
, MEMSUFFIX
)(EA
) << 32;
1072 ret
|= (uint64_t)glue(ld16r
, MEMSUFFIX
)(EA
+ 2);
1075 PPC_SPE_LD_OP(whou_le
, spe_lwhou_le
);
1076 static inline uint64_t glue(spe_lwhos_le
, MEMSUFFIX
) (target_ulong EA
)
1079 ret
= ((uint64_t)((int32_t)glue(ld16rs
, MEMSUFFIX
)(EA
))) << 32;
1080 ret
|= (uint64_t)((int32_t)glue(ld16rs
, MEMSUFFIX
)(EA
+ 2));
1083 PPC_SPE_LD_OP(whos_le
, spe_lwhos_le
);
1084 static inline void glue(spe_stwho_le
, MEMSUFFIX
) (target_ulong EA
,
1087 glue(st16r
, MEMSUFFIX
)(EA
, data
>> 32);
1088 glue(st16r
, MEMSUFFIX
)(EA
+ 2, data
);
1090 PPC_SPE_ST_OP(who_le
, spe_stwho_le
);
1091 #if !defined(TARGET_PPC64)
1092 static inline void glue(spe_stwwo
, MEMSUFFIX
) (target_ulong EA
, uint64_t data
)
1094 glue(stl
, MEMSUFFIX
)(EA
, data
);
1096 PPC_SPE_ST_OP(wwo
, spe_stwwo
);
1097 static inline void glue(spe_stwwo_le
, MEMSUFFIX
) (target_ulong EA
,
1100 glue(st32r
, MEMSUFFIX
)(EA
, data
);
1102 PPC_SPE_ST_OP(wwo_le
, spe_stwwo_le
);
1104 static inline uint64_t glue(spe_lh
, MEMSUFFIX
) (target_ulong EA
)
1107 tmp
= glue(lduw
, MEMSUFFIX
)(EA
);
1108 return ((uint64_t)tmp
<< 48) | ((uint64_t)tmp
<< 16);
1110 PPC_SPE_LD_OP(h
, spe_lh
);
1111 static inline uint64_t glue(spe_lh_le
, MEMSUFFIX
) (target_ulong EA
)
1114 tmp
= glue(ld16r
, MEMSUFFIX
)(EA
);
1115 return ((uint64_t)tmp
<< 48) | ((uint64_t)tmp
<< 16);
1117 PPC_SPE_LD_OP(h_le
, spe_lh_le
);
1118 static inline uint64_t glue(spe_lwwsplat
, MEMSUFFIX
) (target_ulong EA
)
1121 tmp
= glue(ldl
, MEMSUFFIX
)(EA
);
1122 return ((uint64_t)tmp
<< 32) | (uint64_t)tmp
;
1124 PPC_SPE_LD_OP(wwsplat
, spe_lwwsplat
);
1125 static inline uint64_t glue(spe_lwwsplat_le
, MEMSUFFIX
) (target_ulong EA
)
1128 tmp
= glue(ld32r
, MEMSUFFIX
)(EA
);
1129 return ((uint64_t)tmp
<< 32) | (uint64_t)tmp
;
1131 PPC_SPE_LD_OP(wwsplat_le
, spe_lwwsplat_le
);
1132 static inline uint64_t glue(spe_lwhsplat
, MEMSUFFIX
) (target_ulong EA
)
1136 tmp
= glue(lduw
, MEMSUFFIX
)(EA
);
1137 ret
= ((uint64_t)tmp
<< 48) | ((uint64_t)tmp
<< 32);
1138 tmp
= glue(lduw
, MEMSUFFIX
)(EA
+ 2);
1139 ret
|= ((uint64_t)tmp
<< 16) | (uint64_t)tmp
;
1142 PPC_SPE_LD_OP(whsplat
, spe_lwhsplat
);
1143 static inline uint64_t glue(spe_lwhsplat_le
, MEMSUFFIX
) (target_ulong EA
)
1147 tmp
= glue(ld16r
, MEMSUFFIX
)(EA
);
1148 ret
= ((uint64_t)tmp
<< 48) | ((uint64_t)tmp
<< 32);
1149 tmp
= glue(ld16r
, MEMSUFFIX
)(EA
+ 2);
1150 ret
|= ((uint64_t)tmp
<< 16) | (uint64_t)tmp
;
1153 PPC_SPE_LD_OP(whsplat_le
, spe_lwhsplat_le
);
1154 #endif /* defined(TARGET_PPCSPE) */