/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
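
/* This file is included several times, once per memory access suffix
 * (e.g. _raw, _user, _kernel, depending on the build configuration; the
 * exact list comes from the including file).  glue() pastes MEMSUFFIX onto
 * each op and helper name, so, as an illustration, with MEMSUFFIX == _raw
 * the line
 *     T1 = glue(lduw, MEMSUFFIX)(EA);
 * expands to
 *     T1 = lduw_raw(EA);
 */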

static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
}

static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    int16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    /* Swap first, then sign-extend the 16-bit result */
    return (int16_t)(((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8));
}

static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
}
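
/* Worked example (value illustrative): if ldl returns 0x11223344, ld32r
 * returns 0x44332211, the same four bytes in the opposite order.  This is
 * how the little-endian and byte-reversed ops below see memory.
 */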

#if defined(TARGET_PPC64)
static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}

static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}

static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    /* Swap first, then sign-extend the 32-bit result to 64 bits */
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
#endif
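
/* Note that ld16rs and ld32rs must sign-extend *after* the swap: for
 * ld16rs, the bytes 0x00 0x80 swap to 0x8000, which has to come back as
 * -32768 (0xFFFF8000), not as 0x00008000.  Casting only the first operand
 * of the OR would lose the sign bit carried by the low-order source byte.
 */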

static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
{
    uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
    glue(stw, MEMSUFFIX)(EA, tmp);
}

static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
{
    uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
        ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
    glue(stl, MEMSUFFIX)(EA, tmp);
}

#if defined(TARGET_PPC64)
static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
        ((data & 0x00FF000000000000ULL) >> 40) |
        ((data & 0x0000FF0000000000ULL) >> 24) |
        ((data & 0x000000FF00000000ULL) >> 8) |
        ((data & 0x00000000FF000000ULL) << 8) |
        ((data & 0x0000000000FF0000ULL) << 24) |
        ((data & 0x000000000000FF00ULL) << 40) |
        ((data & 0x00000000000000FFULL) << 56);
    glue(stq, MEMSUFFIX)(EA, tmp);
}
#endif

/*** Integer load ***/
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif

#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif

PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif

/*** Integer store ***/
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif

/*** Integer load and store with byte reverse ***/
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif

/*** Integer load and store multiple ***/
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
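
/* lmw/stmw transfer general-purpose registers rD..r31 to or from
 * consecutive words starting at the effective address in T0.  PARAM1 is the
 * index of the first register; the actual copy loop lives in the
 * corresponding do_lmw and do_stmw out-of-line helpers, which also provide
 * the byte-swapped (_le) and 64-bit-address (_64) variants.
 */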

/*** Integer load and store strings ***/
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
{
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/* The PPC32 specification says we must generate an exception if rA is in
 * the range of registers to be loaded.  On the other hand, IBM says this is
 * valid, but rA won't be loaded.  For now, I'll follow the spec...
 */
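/* Concretely (register numbers illustrative): lswx with rD = 12 and an XER
 * byte count of 20 writes r12..r16.  The overlap test below compares the
 * start register (PARAM1) and byte count (T1) against rA and rB
 * (PARAM2/PARAM3) and raises a program exception on a collision.
 */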
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
{
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/*** Floating-point store ***/
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif

PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfl);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfl);
#endif

static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
          ((u.u & 0x00FF000000000000ULL) >> 40) |
          ((u.u & 0x0000FF0000000000ULL) >> 24) |
          ((u.u & 0x000000FF00000000ULL) >> 8) |
          ((u.u & 0x00000000FF000000ULL) << 8) |
          ((u.u & 0x0000000000FF0000ULL) << 24) |
          ((u.u & 0x000000000000FF00ULL) << 40) |
          ((u.u & 0x00000000000000FFULL) << 56);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}

static inline void glue(stflr, MEMSUFFIX) (target_ulong EA, float f)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = f;
    u.u = ((u.u & 0xFF000000UL) >> 24) |
          ((u.u & 0x00FF0000UL) >> 8) |
          ((u.u & 0x0000FF00UL) << 8) |
          ((u.u & 0x000000FFUL) << 24);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}
PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stflr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stflr);
#endif

/*** Floating-point load ***/
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
#endif

PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfl);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfl);
#endif

static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
          ((u.u & 0x00FF000000000000ULL) >> 40) |
          ((u.u & 0x0000FF0000000000ULL) >> 24) |
          ((u.u & 0x000000FF00000000ULL) >> 8) |
          ((u.u & 0x00000000FF000000ULL) << 8) |
          ((u.u & 0x0000000000FF0000ULL) << 24) |
          ((u.u & 0x000000000000FF00ULL) << 40) |
          ((u.u & 0x00000000000000FFULL) << 56);

    return u.d;
}

static inline float glue(ldflr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
          ((u.u & 0x00FF0000UL) >> 8) |
          ((u.u & 0x0000FF00UL) << 8) |
          ((u.u & 0x000000FFUL) << 24);

    return u.f;
}

PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldflr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldflr);
#endif

/* Load and set reservation */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
        regs->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    /* ldarx accesses a doubleword: check 8-byte alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
        regs->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    /* ldarx accesses a doubleword: check 8-byte alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif

/* Store with reservation */
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    /* stdcx. stores a doubleword: check 8-byte alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
#endif

void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    /* stdcx. stores a doubleword: check 8-byte alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
#endif
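
/* Every store-conditional above sets CR0 the same way: 0x02 (the EQ bit)
 * reports success, the low bit comes from xer_ov, and LT/GT stay clear.
 * Whatever the outcome, the reservation is consumed: regs->reserve = -1 can
 * never match a (word-aligned) reservation address again.
 */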

void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
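
/* dcbz (data cache block clear to zero) zeroes a whole cache line: eight
 * word stores cover the default 32-byte line, and eight more are emitted
 * when the build-time DCACHE_LINE_SIZE is 64 (POWER and the PowerPC 601).
 */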

#if defined(TARGET_PPC64)
void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
#endif

/* Instruction cache block invalidate */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif

/* External access */
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

/* XXX: these micro-ops need tests! */
/* PowerPC 601 specific instructions (POWER bridge) */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When the byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}

/* POWER2 quad load and store */
/* XXX: TAGs are not managed */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}