void glue(do_lsw, MEMSUFFIX) (int dst);
void glue(do_stsw, MEMSUFFIX) (int src);
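
/* Byte-swapping load/store helpers: they wrap the plain lduw/ldl/stw/stl
 * accessors and reverse the byte order (e.g. ld32r turns 0x12345678 into
 * 0x78563412).  They back both the little-endian (*_le) ops and the
 * byte-reverse (hbr/wbr) ops below.
 */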
static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
}

static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);

    /* Swap the bytes, then sign-extend the swapped halfword */
    tmp = ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
    return (int16_t)tmp;
}

static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
}

static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
{
    uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
    glue(stw, MEMSUFFIX)(EA, tmp);
}

static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
{
    uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
        ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
    glue(stl, MEMSUFFIX)(EA, tmp);
}

/*** Integer load ***/
#define PPC_LD_OP(name, op)                  \
PPC_OP(glue(glue(l, name), MEMSUFFIX))       \
{                                            \
    T1 = glue(op, MEMSUFFIX)(T0);            \
    RETURN();                                \
}

#define PPC_ST_OP(name, op)                  \
PPC_OP(glue(glue(st, name), MEMSUFFIX))      \
{                                            \
    glue(op, MEMSUFFIX)(T0, T1);             \
    RETURN();                                \
}
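
/* Each PPC_LD_OP/PPC_ST_OP instantiation below generates one micro-op per
 * MEMSUFFIX.  For example, PPC_LD_OP(wz_le, ld32r) expands to roughly
 *
 *     PPC_OP(glue(lwz_le, MEMSUFFIX))
 *     {
 *         T1 = glue(ld32r, MEMSUFFIX)(T0);
 *         RETURN();
 *     }
 *
 * i.e. (assuming PPC_OP() declares the op_* handler as elsewhere in this
 * target) an op_lwz_le_* handler that loads a byte-swapped word from the
 * EA in T0 into T1.
 */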
PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);

/*** Integer store ***/
PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);

/*** Integer load and store with byte reverse ***/
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
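
/* For the little-endian variants of the byte-reverse ops, the two byte
 * reversals cancel out, so the plain access functions are used directly.
 */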
PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);

/*** Integer load and store multiple ***/
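/* lmw/stmw transfer GPRs PARAM(1) .. r31 to/from consecutive words starting
 * at the effective address held in T0.
 */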
PPC_OP(glue(lmw, MEMSUFFIX))
{
    int dst = PARAM(1);

    for (; dst < 32; dst++, T0 += 4) {
        ugpr(dst) = glue(ldl, MEMSUFFIX)(T0);
    }
    RETURN();
}

PPC_OP(glue(stmw, MEMSUFFIX))
{
    int src = PARAM(1);

    for (; src < 32; src++, T0 += 4) {
        glue(stl, MEMSUFFIX)(T0, ugpr(src));
    }
    RETURN();
}

PPC_OP(glue(lmw_le, MEMSUFFIX))
{
    int dst = PARAM(1);

    for (; dst < 32; dst++, T0 += 4) {
        ugpr(dst) = glue(ld32r, MEMSUFFIX)(T0);
    }
    RETURN();
}

PPC_OP(glue(stmw_le, MEMSUFFIX))
{
    int src = PARAM(1);

    for (; src < 32; src++, T0 += 4) {
        glue(st32r, MEMSUFFIX)(T0, ugpr(src));
    }
    RETURN();
}

/*** Integer load and store strings ***/
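/* do_lsw/do_stsw (and their *_le variants) are out-of-line helpers declared
 * above; presumably they transfer the byte count held in T1, starting at the
 * EA in T0, into/out of GPRs beginning with the register passed as PARAM(1).
 */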
PPC_OP(glue(lswi, MEMSUFFIX))
{
    glue(do_lsw, MEMSUFFIX)(PARAM(1));
    RETURN();
}

void glue(do_lsw_le, MEMSUFFIX) (int dst);
PPC_OP(glue(lswi_le, MEMSUFFIX))
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM(1));
    RETURN();
}

/* The PPC32 specification says we must generate an exception if rA is in
 * the range of registers to be loaded.  On the other hand, IBM says this is
 * valid, but rA won't be loaded.  For now, I'll follow the spec...
 */
PPC_OP(glue(lswx, MEMSUFFIX))
{
    if (T1 > 0) {
        if ((PARAM(1) < PARAM(2) && (PARAM(1) + T1) > PARAM(2)) ||
            (PARAM(1) < PARAM(3) && (PARAM(1) + T1) > PARAM(3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM(1));
        }
    }
    RETURN();
}

PPC_OP(glue(lswx_le, MEMSUFFIX))
{
    if (T1 > 0) {
        if ((PARAM(1) < PARAM(2) && (PARAM(1) + T1) > PARAM(2)) ||
            (PARAM(1) < PARAM(3) && (PARAM(1) + T1) > PARAM(3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le, MEMSUFFIX)(PARAM(1));
        }
    }
    RETURN();
}

PPC_OP(glue(stsw, MEMSUFFIX))
{
    glue(do_stsw, MEMSUFFIX)(PARAM(1));
    RETURN();
}

void glue(do_stsw_le, MEMSUFFIX) (int src);
PPC_OP(glue(stsw_le, MEMSUFFIX))
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM(1));
    RETURN();
}

/*** Floating-point store ***/
#define PPC_STF_OP(name, op)                 \
PPC_OP(glue(glue(st, name), MEMSUFFIX))      \
{                                            \
    glue(op, MEMSUFFIX)(T0, FT1);            \
    RETURN();                                \
}

PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfl);
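
/* stfqr/stflr byte-swap a double/float through a union before storing it with
 * the regular stfq/stfl accessors; they back the *_le floating-point stores
 * (ldfqr/ldflr below do the same for loads).
 */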
static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
          ((u.u & 0x00FF000000000000ULL) >> 40) |
          ((u.u & 0x0000FF0000000000ULL) >> 24) |
          ((u.u & 0x000000FF00000000ULL) >> 8) |
          ((u.u & 0x00000000FF000000ULL) << 8) |
          ((u.u & 0x0000000000FF0000ULL) << 24) |
          ((u.u & 0x000000000000FF00ULL) << 40) |
          ((u.u & 0x00000000000000FFULL) << 56);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}

static inline void glue(stflr, MEMSUFFIX) (target_ulong EA, float f)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = f;
    u.u = ((u.u & 0xFF000000UL) >> 24) |
          ((u.u & 0x00FF0000UL) >> 8) |
          ((u.u & 0x0000FF00UL) << 8) |
          ((u.u & 0x000000FFUL) << 24);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}

PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stflr);

/*** Floating-point load ***/
#define PPC_LDF_OP(name, op)                 \
PPC_OP(glue(glue(l, name), MEMSUFFIX))       \
{                                            \
    FT1 = glue(op, MEMSUFFIX)(T0);           \
    RETURN();                                \
}

PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfl);

static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
          ((u.u & 0x00FF000000000000ULL) >> 40) |
          ((u.u & 0x0000FF0000000000ULL) >> 24) |
          ((u.u & 0x000000FF00000000ULL) >> 8) |
          ((u.u & 0x00000000FF000000ULL) << 8) |
          ((u.u & 0x0000000000FF0000ULL) << 24) |
          ((u.u & 0x000000000000FF00ULL) << 40) |
          ((u.u & 0x00000000000000FFULL) << 56);

    return u.d;
}

static inline float glue(ldflr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
          ((u.u & 0x00FF0000UL) >> 8) |
          ((u.u & 0x0000FF00UL) << 8) |
          ((u.u & 0x000000FFUL) << 24);

    return u.f;
}

PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldflr);

/* Load and set reservation */
PPC_OP(glue(lwarx, MEMSUFFIX))
{
    if (T0 & 0x03) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)(T0);
        regs->reserve = T0;
    }
    RETURN();
}

PPC_OP(glue(lwarx_le, MEMSUFFIX))
{
    if (T0 & 0x03) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)(T0);
        regs->reserve = T0;
    }
    RETURN();
}

/* Store with reservation */
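/* The conditional store only succeeds if the reservation recorded by lwarx
 * still matches the EA in T0; the EQ bit (0x02) of crf[0] reports whether
 * the store was performed.
 */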
PPC_OP(glue(stwcx, MEMSUFFIX))
{
    if (T0 & 0x03) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (regs->reserve != T0) {
            env->crf[0] = xer_ov;
        } else {
            glue(stl, MEMSUFFIX)(T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

PPC_OP(glue(stwcx_le, MEMSUFFIX))
{
    if (T0 & 0x03) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (regs->reserve != T0) {
            env->crf[0] = xer_ov;
        } else {
            glue(st32r, MEMSUFFIX)(T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
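
/* dcbz zeroes one data cache line; the line size is hard-coded to 32 bytes
 * (eight word stores) here.
 */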
PPC_OP(glue(dcbz, MEMSUFFIX))
{
    glue(stl, MEMSUFFIX)(T0 + 0x00, 0);
    glue(stl, MEMSUFFIX)(T0 + 0x04, 0);
    glue(stl, MEMSUFFIX)(T0 + 0x08, 0);
    glue(stl, MEMSUFFIX)(T0 + 0x0C, 0);
    glue(stl, MEMSUFFIX)(T0 + 0x10, 0);
    glue(stl, MEMSUFFIX)(T0 + 0x14, 0);
    glue(stl, MEMSUFFIX)(T0 + 0x18, 0);
    glue(stl, MEMSUFFIX)(T0 + 0x1C, 0);
    RETURN();
}

/* External access */
PPC_OP(glue(eciwx, MEMSUFFIX))
{
    T1 = glue(ldl, MEMSUFFIX)(T0);
    RETURN();
}

PPC_OP(glue(ecowx, MEMSUFFIX))
{
    glue(stl, MEMSUFFIX)(T0, T1);
    RETURN();
}

PPC_OP(glue(eciwx_le, MEMSUFFIX))
{
    T1 = glue(ld32r, MEMSUFFIX)(T0);
    RETURN();
}

PPC_OP(glue(ecowx_le, MEMSUFFIX))
{
    glue(st32r, MEMSUFFIX)(T0, T1);
    RETURN();
}