/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
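
/* This file is a template: it is included several times by the memory
 * access front-ends, each time with a different MEMSUFFIX (typically
 * _raw, _user or _kernel, depending on the MMU mode being generated).
 * glue() pastes tokens together, so for instance
 *     glue(ld16r, MEMSUFFIX)  ->  ld16r_raw
 * when MEMSUFFIX is _raw.  The ld*r/st*r helpers below perform
 * byte-reversed (little-endian) accesses on top of the target-order
 * lduw/ldl/ldq and stw/stl/stq primitives.
 */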
static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
}
static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    /* Sign-extend the value after byte-swapping, not before */
    return (int16_t)(((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8));
}
static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
}
#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        /* The low byte must move up by 56 bits, not 54 */
        ((tmp & 0x00000000000000FFULL) << 56);
}
#endif
#if defined(TARGET_PPC64)
static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}
static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    /* Sign-extend the value after byte-swapping, not before */
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
#endif
static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
{
    uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
    glue(stw, MEMSUFFIX)(EA, tmp);
}
static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
{
    uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
        ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
    glue(stl, MEMSUFFIX)(EA, tmp);
}
#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
        ((data & 0x00FF000000000000ULL) >> 40) |
        ((data & 0x0000FF0000000000ULL) >> 24) |
        ((data & 0x000000FF00000000ULL) >> 8) |
        ((data & 0x00000000FF000000ULL) << 8) |
        ((data & 0x0000000000FF0000ULL) << 24) |
        ((data & 0x000000000000FF00ULL) << 40) |
        ((data & 0x00000000000000FFULL) << 56);
    glue(stq, MEMSUFFIX)(EA, tmp);
}
#endif
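
/* The micro-ops below follow the dyngen conventions used throughout
 * this file: T0 and T1 are the fixed temporary registers of the
 * generated code (T0 carries the effective address, T1 the data),
 * PARAM1..PARAM3 are compile-time op parameters, and each op ends
 * with RETURN().  The _64 variants differ only in not truncating the
 * effective address to 32 bits.
 */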
/*** Integer load ***/
#define PPC_LD_OP(name, op)                                           \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                 \
{                                                                     \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                           \
    RETURN();                                                         \
}

#if defined(TARGET_PPC64)
#define PPC_LD_OP_64(name, op)                                        \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)      \
{                                                                     \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                           \
    RETURN();                                                         \
}
#endif

#define PPC_ST_OP(name, op)                                           \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                \
{                                                                     \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                            \
    RETURN();                                                         \
}

#if defined(TARGET_PPC64)
#define PPC_ST_OP_64(name, op)                                        \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)     \
{                                                                     \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                            \
    RETURN();                                                         \
}
#endif
PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif
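
/* The op suffixes mirror the PowerPC mnemonics: bz = byte and zero,
 * ha = halfword algebraic (sign-extended), hz = halfword and zero,
 * wa/wz = word algebraic/zero, d = doubleword (cf. lbz, lha, lhz,
 * lwa, lwz, ld).  A _le suffix selects the byte-reversed helpers for
 * little-endian mode, and _64 selects 64-bit effective addresses.
 */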
/*** Integer store ***/
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif
/*** Integer load and store with byte reverse ***/
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif
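
/* In little-endian mode the explicit byte reverse of lhbrx/lwbrx
 * cancels out against the reversed data order, so the _le variants
 * degenerate to plain loads and stores.
 */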
PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif
/*** Integer load and store multiple ***/
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
/*** Integer load and store strings ***/
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
{
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
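
/* In the lswx checks below, PARAM1 is presumably the first register
 * to be loaded, while PARAM2 and PARAM3 are the indexes of rA and rB;
 * the test rejects a load whose register range would overlap either.
 */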
/* The PPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * On the other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
 */
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif
void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif
void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
{
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
/*** Floating-point store ***/
#define PPC_STF_OP(name, op)                                          \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                \
{                                                                     \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                           \
    RETURN();                                                         \
}

#if defined(TARGET_PPC64)
#define PPC_STF_OP_64(name, op)                                       \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)     \
{                                                                     \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                           \
    RETURN();                                                         \
}
#endif
static inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
{
    glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
}
static inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    glue(stl, MEMSUFFIX)(EA, u.u);
}
PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfs);
PPC_STF_OP(fiwx, stfiwx);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfs);
PPC_STF_OP_64(fiwx, stfiwx);
#endif
static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x00000000000000FFULL) << 56);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}
static inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = float64_to_float32(d, &env->fp_status);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000UL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFUL) << 24);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}
static inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000UL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFUL) << 24);
    glue(stl, MEMSUFFIX)(EA, u.u);
}
PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stfsr);
PPC_STF_OP(fiwx_le, stfiwxr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stfsr);
PPC_STF_OP_64(fiwx_le, stfiwxr);
#endif
/*** Floating-point load ***/
#define PPC_LDF_OP(name, op)                                          \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                 \
{                                                                     \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                          \
    RETURN();                                                         \
}

#if defined(TARGET_PPC64)
#define PPC_LDF_OP_64(name, op)                                       \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)      \
{                                                                     \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                          \
    RETURN();                                                         \
}
#endif
static inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
{
    return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
}
PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfs);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfs);
#endif
static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x00000000000000FFULL) << 56);

    return u.d;
}
static inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000UL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFUL) << 24);

    return float32_to_float64(u.f, &env->fp_status);
}
PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldfsr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldfsr);
#endif
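
/* lwarx/ldarx load a value and record a reservation on its address in
 * env->reserve; a later stwcx./stdcx. only performs its store if the
 * reservation still matches, reporting the outcome in CR0 (0x02 = EQ
 * on success, OR'ed with the XER summary overflow bit).  Guest code
 * uses the pair for atomic read-modify-write loops, e.g.:
 *     loop: lwarx  r5,0,r3
 *           addi   r5,r5,1
 *           stwcx. r5,0,r3
 *           bne-   loop
 */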
/* Load and set reservation */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}
#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
{
    /* ldarx requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    /* ldarx requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}
#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
{
    /* ldarx requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    /* ldarx requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
/* Store with reservation */
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    /* A store conditional always clears the reservation */
    env->reserve = -1;
    RETURN();
}
#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
{
    /* stdcx. requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    /* stdcx. requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}
#endif
void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}
#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
{
    /* stdcx. requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    /* stdcx. requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = -1;
    RETURN();
}
#endif
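
/* dcbz zeroes one data cache block.  The line is cleared with 32-bit
 * stores: eight for the default 32-byte line, sixteen when
 * DCACHE_LINE_SIZE is 64.
 */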
void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
#if defined(TARGET_PPC64)
void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
#endif
/* Instruction cache block invalidate */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif
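
/* eciwx/ecowx (external control in/out word indexed) are modeled here
 * as plain 32-bit accesses; the EAR-related permission checks are
 * assumed to be handled elsewhere.
 */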
/* External access */
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif
/* XXX: these micro-ops need tests! */
/* PowerPC 601 specific instructions (POWER bridge) */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When the byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}
/* POWER2 quad load and store */
/* XXX: TAGs are not managed */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}
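
/* SPE (signal processing engine) ops treat the GPRs as 64-bit values;
 * T1_64 carries the full 64-bit data.  The helpers below assemble and
 * split the two 32-bit, four 16-bit, or mixed element layouts used by
 * the ev* load/store instructions.
 */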
#if defined(TARGET_PPCEMB)
#define _PPC_SPE_LD_OP(name, op)                                      \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)             \
{                                                                     \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                        \
    RETURN();                                                         \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_LD_OP_64(name, op)                                   \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)  \
{                                                                     \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                        \
    RETURN();                                                         \
}
#define PPC_SPE_LD_OP(name, op)                                       \
_PPC_SPE_LD_OP(name, op);                                             \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                       \
_PPC_SPE_LD_OP(name, op)
#endif

#define _PPC_SPE_ST_OP(name, op)                                      \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)            \
{                                                                     \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                         \
    RETURN();                                                         \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                   \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void) \
{                                                                     \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                         \
    RETURN();                                                         \
}
#define PPC_SPE_ST_OP(name, op)                                       \
_PPC_SPE_ST_OP(name, op);                                             \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                       \
_PPC_SPE_ST_OP(name, op)
#endif
#if !defined(TARGET_PPC64)
PPC_SPE_LD_OP(dd, ldq);
PPC_SPE_ST_OP(dd, stq);
PPC_SPE_LD_OP(dd_le, ld64r);
PPC_SPE_ST_OP(dd_le, st64r);
#endif
static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;

    ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);

    return ret;
}
PPC_SPE_LD_OP(dw, spe_ldw);

static inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data >> 32);
    glue(stl, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw, spe_stdw);

static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;

    ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);

    return ret;
}
PPC_SPE_LD_OP(dw_le, spe_ldw_le);

static inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
                                                 uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data >> 32);
    glue(st32r, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;

    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 6);

    return ret;
}
PPC_SPE_LD_OP(dh, spe_ldh);

static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 4, data >> 16);
    glue(stw, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh, spe_stdh);

static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;

    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 6);

    return ret;
}
PPC_SPE_LD_OP(dh_le, spe_ldh_le);

static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
                                                 uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
    glue(st16r, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;

    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 16;

    return ret;
}
PPC_SPE_LD_OP(whe, spe_lwhe);

static inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe, spe_stwhe);

static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;

    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 16;

    return ret;
}
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);

static inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;

    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2);

    return ret;
}
PPC_SPE_LD_OP(whou, spe_lwhou);

static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;

    /* Mask each sign-extended element to 32 bits so the extension of
       the second one cannot spill into the first */
    ret = ((uint64_t)(uint32_t)glue(ldsw, MEMSUFFIX)(EA)) << 32;
    ret |= (uint64_t)(uint32_t)glue(ldsw, MEMSUFFIX)(EA + 2);

    return ret;
}
PPC_SPE_LD_OP(whos, spe_lwhos);

static inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who, spe_stwho);

static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;

    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2);

    return ret;
}
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);

static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;

    ret = ((uint64_t)(uint32_t)glue(ld16rs, MEMSUFFIX)(EA)) << 32;
    ret |= (uint64_t)(uint32_t)glue(ld16rs, MEMSUFFIX)(EA + 2);

    return ret;
}
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);

static inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who_le, spe_stwho_le);
#if !defined(TARGET_PPC64)
static inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo, spe_stwwo);

static inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
#endif
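
/* The *splat helpers replicate a loaded element across the 64-bit
 * result, presumably backing the ev*splat load forms: a word is
 * duplicated into both word positions, a halfword pair into all four
 * halfword positions.
 */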
static inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;

    tmp = glue(lduw, MEMSUFFIX)(EA);

    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h, spe_lh);

static inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;

    tmp = glue(ld16r, MEMSUFFIX)(EA);

    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h_le, spe_lh_le);

static inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;

    tmp = glue(ldl, MEMSUFFIX)(EA);

    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);

static inline uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;

    tmp = glue(ld32r, MEMSUFFIX)(EA);

    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);

static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;

    tmp = glue(lduw, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(lduw, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;

    return ret;
}
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);

static inline uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;

    tmp = glue(ld16r, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(ld16r, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;

    return ret;
}
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
#endif /* defined(TARGET_PPCEMB) */