Avoid gcc warnings
[qemu/qemu_0_9_1_stable.git] / target-ppc / op_mem.h
blobca48f5de0ac9d6eb26a262f3c7e400b3cfa1877c
1 /*
2 * PowerPC emulation micro-operations for qemu.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 static always_inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
23 uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
24 return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
27 static always_inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
29 int16_t tmp = glue(lduw, MEMSUFFIX)(EA);
30 return (int16_t)((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
33 static always_inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
35 uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
36 return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
37 ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
/* Byte-reversed 64-bit load.
 * Fix: the least-significant byte must be shifted left by 56, not 54;
 * the wrong shift count corrupted every byte-reversed doubleword load.
 */
static always_inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}
#endif
#if defined(TARGET_PPC64)
/* 32-bit load, sign-extended to 64 bits. */
static always_inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}

/* Byte-reversed 32-bit load, sign-extended to 64 bits.
 * Fix: the (int32_t) cast previously applied only to the first OR term,
 * so the swapped word was zero- instead of sign-extended; the cast must
 * cover the whole byte-swapped value.
 */
static always_inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
                     ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
#endif
69 static always_inline void glue(st16r, MEMSUFFIX) (target_ulong EA,
70 uint16_t data)
72 uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
73 glue(stw, MEMSUFFIX)(EA, tmp);
76 static always_inline void glue(st32r, MEMSUFFIX) (target_ulong EA,
77 uint32_t data)
79 uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
80 ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
81 glue(stl, MEMSUFFIX)(EA, tmp);
84 #if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
85 static always_inline void glue(st64r, MEMSUFFIX) (target_ulong EA,
86 uint64_t data)
88 uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
89 ((data & 0x00FF000000000000ULL) >> 40) |
90 ((data & 0x0000FF0000000000ULL) >> 24) |
91 ((data & 0x000000FF00000000ULL) >> 8) |
92 ((data & 0x00000000FF000000ULL) << 8) |
93 ((data & 0x0000000000FF0000ULL) << 24) |
94 ((data & 0x000000000000FF00ULL) << 40) |
95 ((data & 0x00000000000000FFULL) << 56);
96 glue(stq, MEMSUFFIX)(EA, tmp);
98 #endif
/*** Integer load ***/
/* Each expansion defines one dyngen micro-op that loads through the
 * MEMSUFFIX-specific accessor; T0 holds the effective address, T1 the
 * result.  The 32-bit variants truncate the EA, the _64 ones keep it.
 */
#define PPC_LD_OP(name, op)                                           \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                 \
{                                                                     \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                           \
    RETURN();                                                         \
}

#if defined(TARGET_PPC64)
#define PPC_LD_OP_64(name, op)                                        \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)      \
{                                                                     \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                           \
    RETURN();                                                         \
}
#endif

/* Store micro-ops: T0 is the effective address, T1 the value. */
#define PPC_ST_OP(name, op)                                           \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                \
{                                                                     \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                            \
    RETURN();                                                         \
}

#if defined(TARGET_PPC64)
#define PPC_ST_OP_64(name, op)                                        \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)     \
{                                                                     \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                            \
    RETURN();                                                         \
}
#endif
/* Big-endian integer loads. */
PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

/* Little-endian integer loads (byte-reversing accessors). */
PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif

/*** Integer store ***/
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

/* Little-endian integer stores. */
PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif

/*** Integer load and store with byte reverse ***/
/* lhbrx/lwbrx/sthbrx/stwbrx: byte-reversed in big-endian mode... */
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

/* ...and plain accesses in little-endian mode: the two swaps cancel. */
PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif
205 /*** Integer load and store multiple ***/
206 void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
208 glue(do_lmw, MEMSUFFIX)(PARAM1);
209 RETURN();
212 #if defined(TARGET_PPC64)
213 void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
215 glue(do_lmw_64, MEMSUFFIX)(PARAM1);
216 RETURN();
218 #endif
220 void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
222 glue(do_lmw_le, MEMSUFFIX)(PARAM1);
223 RETURN();
226 #if defined(TARGET_PPC64)
227 void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
229 glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
230 RETURN();
232 #endif
234 void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
236 glue(do_stmw, MEMSUFFIX)(PARAM1);
237 RETURN();
240 #if defined(TARGET_PPC64)
241 void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
243 glue(do_stmw_64, MEMSUFFIX)(PARAM1);
244 RETURN();
246 #endif
248 void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
250 glue(do_stmw_le, MEMSUFFIX)(PARAM1);
251 RETURN();
254 #if defined(TARGET_PPC64)
255 void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
257 glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
258 RETURN();
260 #endif
262 /*** Integer load and store strings ***/
263 void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
265 glue(do_lsw, MEMSUFFIX)(PARAM1);
266 RETURN();
269 #if defined(TARGET_PPC64)
270 void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
272 glue(do_lsw_64, MEMSUFFIX)(PARAM1);
273 RETURN();
275 #endif
277 void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
279 glue(do_lsw_le, MEMSUFFIX)(PARAM1);
280 RETURN();
283 #if defined(TARGET_PPC64)
284 void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
286 glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
287 RETURN();
289 #endif
291 /* PPC32 specification says we must generate an exception if
292 * rA is in the range of registers to be loaded.
293 * In an other hand, IBM says this is valid, but rA won't be loaded.
294 * For now, I'll follow the spec...
296 void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
298 /* Note: T1 comes from xer_bc then no cast is needed */
299 if (likely(T1 != 0)) {
300 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
301 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
302 do_raise_exception_err(POWERPC_EXCP_PROGRAM,
303 POWERPC_EXCP_INVAL |
304 POWERPC_EXCP_INVAL_LSWX);
305 } else {
306 glue(do_lsw, MEMSUFFIX)(PARAM1);
309 RETURN();
312 #if defined(TARGET_PPC64)
313 void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
315 /* Note: T1 comes from xer_bc then no cast is needed */
316 if (likely(T1 != 0)) {
317 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
318 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
319 do_raise_exception_err(POWERPC_EXCP_PROGRAM,
320 POWERPC_EXCP_INVAL |
321 POWERPC_EXCP_INVAL_LSWX);
322 } else {
323 glue(do_lsw_64, MEMSUFFIX)(PARAM1);
326 RETURN();
328 #endif
330 void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
332 /* Note: T1 comes from xer_bc then no cast is needed */
333 if (likely(T1 != 0)) {
334 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
335 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
336 do_raise_exception_err(POWERPC_EXCP_PROGRAM,
337 POWERPC_EXCP_INVAL |
338 POWERPC_EXCP_INVAL_LSWX);
339 } else {
340 glue(do_lsw_le, MEMSUFFIX)(PARAM1);
343 RETURN();
346 #if defined(TARGET_PPC64)
347 void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
349 /* Note: T1 comes from xer_bc then no cast is needed */
350 if (likely(T1 != 0)) {
351 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
352 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
353 do_raise_exception_err(POWERPC_EXCP_PROGRAM,
354 POWERPC_EXCP_INVAL |
355 POWERPC_EXCP_INVAL_LSWX);
356 } else {
357 glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
360 RETURN();
362 #endif
364 void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
366 glue(do_stsw, MEMSUFFIX)(PARAM1);
367 RETURN();
370 #if defined(TARGET_PPC64)
371 void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
373 glue(do_stsw_64, MEMSUFFIX)(PARAM1);
374 RETURN();
376 #endif
378 void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
380 glue(do_stsw_le, MEMSUFFIX)(PARAM1);
381 RETURN();
384 #if defined(TARGET_PPC64)
385 void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
387 glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
388 RETURN();
390 #endif
392 /*** Floating-point store ***/
393 #define PPC_STF_OP(name, op) \
394 void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void) \
396 glue(op, MEMSUFFIX)((uint32_t)T0, FT0); \
397 RETURN(); \
400 #if defined(TARGET_PPC64)
401 #define PPC_STF_OP_64(name, op) \
402 void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void) \
404 glue(op, MEMSUFFIX)((uint64_t)T0, FT0); \
405 RETURN(); \
407 #endif
409 static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
411 glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
414 static always_inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
416 union {
417 double d;
418 uint64_t u;
419 } u;
421 /* Store the low order 32 bits without any conversion */
422 u.d = d;
423 glue(stl, MEMSUFFIX)(EA, u.u);
426 PPC_STF_OP(fd, stfq);
427 PPC_STF_OP(fs, stfs);
428 PPC_STF_OP(fiwx, stfiwx);
429 #if defined(TARGET_PPC64)
430 PPC_STF_OP_64(fd, stfq);
431 PPC_STF_OP_64(fs, stfs);
432 PPC_STF_OP_64(fiwx, stfiwx);
433 #endif
435 static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
437 union {
438 double d;
439 uint64_t u;
440 } u;
442 u.d = d;
443 u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
444 ((u.u & 0x00FF000000000000ULL) >> 40) |
445 ((u.u & 0x0000FF0000000000ULL) >> 24) |
446 ((u.u & 0x000000FF00000000ULL) >> 8) |
447 ((u.u & 0x00000000FF000000ULL) << 8) |
448 ((u.u & 0x0000000000FF0000ULL) << 24) |
449 ((u.u & 0x000000000000FF00ULL) << 40) |
450 ((u.u & 0x00000000000000FFULL) << 56);
451 glue(stfq, MEMSUFFIX)(EA, u.d);
454 static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
456 union {
457 float f;
458 uint32_t u;
459 } u;
461 u.f = float64_to_float32(d, &env->fp_status);
462 u.u = ((u.u & 0xFF000000UL) >> 24) |
463 ((u.u & 0x00FF0000ULL) >> 8) |
464 ((u.u & 0x0000FF00UL) << 8) |
465 ((u.u & 0x000000FFULL) << 24);
466 glue(stfl, MEMSUFFIX)(EA, u.f);
469 static always_inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
471 union {
472 double d;
473 uint64_t u;
474 } u;
476 /* Store the low order 32 bits without any conversion */
477 u.d = d;
478 u.u = ((u.u & 0xFF000000UL) >> 24) |
479 ((u.u & 0x00FF0000ULL) >> 8) |
480 ((u.u & 0x0000FF00UL) << 8) |
481 ((u.u & 0x000000FFULL) << 24);
482 glue(stl, MEMSUFFIX)(EA, u.u);
485 PPC_STF_OP(fd_le, stfqr);
486 PPC_STF_OP(fs_le, stfsr);
487 PPC_STF_OP(fiwx_le, stfiwxr);
488 #if defined(TARGET_PPC64)
489 PPC_STF_OP_64(fd_le, stfqr);
490 PPC_STF_OP_64(fs_le, stfsr);
491 PPC_STF_OP_64(fiwx_le, stfiwxr);
492 #endif
494 /*** Floating-point load ***/
495 #define PPC_LDF_OP(name, op) \
496 void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void) \
498 FT0 = glue(op, MEMSUFFIX)((uint32_t)T0); \
499 RETURN(); \
502 #if defined(TARGET_PPC64)
503 #define PPC_LDF_OP_64(name, op) \
504 void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \
506 FT0 = glue(op, MEMSUFFIX)((uint64_t)T0); \
507 RETURN(); \
509 #endif
511 static always_inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
513 return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
516 PPC_LDF_OP(fd, ldfq);
517 PPC_LDF_OP(fs, ldfs);
518 #if defined(TARGET_PPC64)
519 PPC_LDF_OP_64(fd, ldfq);
520 PPC_LDF_OP_64(fs, ldfs);
521 #endif
523 static always_inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
525 union {
526 double d;
527 uint64_t u;
528 } u;
530 u.d = glue(ldfq, MEMSUFFIX)(EA);
531 u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
532 ((u.u & 0x00FF000000000000ULL) >> 40) |
533 ((u.u & 0x0000FF0000000000ULL) >> 24) |
534 ((u.u & 0x000000FF00000000ULL) >> 8) |
535 ((u.u & 0x00000000FF000000ULL) << 8) |
536 ((u.u & 0x0000000000FF0000ULL) << 24) |
537 ((u.u & 0x000000000000FF00ULL) << 40) |
538 ((u.u & 0x00000000000000FFULL) << 56);
540 return u.d;
543 static always_inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
545 union {
546 float f;
547 uint32_t u;
548 } u;
550 u.f = glue(ldfl, MEMSUFFIX)(EA);
551 u.u = ((u.u & 0xFF000000UL) >> 24) |
552 ((u.u & 0x00FF0000ULL) >> 8) |
553 ((u.u & 0x0000FF00UL) << 8) |
554 ((u.u & 0x000000FFULL) << 24);
556 return float32_to_float64(u.f, &env->fp_status);
559 PPC_LDF_OP(fd_le, ldfqr);
560 PPC_LDF_OP(fs_le, ldfsr);
561 #if defined(TARGET_PPC64)
562 PPC_LDF_OP_64(fd_le, ldfqr);
563 PPC_LDF_OP_64(fs_le, ldfsr);
564 #endif
566 /* Load and set reservation */
567 void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
569 if (unlikely(T0 & 0x03)) {
570 do_raise_exception(POWERPC_EXCP_ALIGN);
571 } else {
572 T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
573 env->reserve = (uint32_t)T0;
575 RETURN();
578 #if defined(TARGET_PPC64)
579 void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
581 if (unlikely(T0 & 0x03)) {
582 do_raise_exception(POWERPC_EXCP_ALIGN);
583 } else {
584 T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
585 env->reserve = (uint64_t)T0;
587 RETURN();
590 void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
592 if (unlikely(T0 & 0x03)) {
593 do_raise_exception(POWERPC_EXCP_ALIGN);
594 } else {
595 T1 = glue(ldq, MEMSUFFIX)((uint32_t)T0);
596 env->reserve = (uint32_t)T0;
598 RETURN();
601 void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
603 if (unlikely(T0 & 0x03)) {
604 do_raise_exception(POWERPC_EXCP_ALIGN);
605 } else {
606 T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
607 env->reserve = (uint64_t)T0;
609 RETURN();
611 #endif
613 void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
615 if (unlikely(T0 & 0x03)) {
616 do_raise_exception(POWERPC_EXCP_ALIGN);
617 } else {
618 T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
619 env->reserve = (uint32_t)T0;
621 RETURN();
624 #if defined(TARGET_PPC64)
625 void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
627 if (unlikely(T0 & 0x03)) {
628 do_raise_exception(POWERPC_EXCP_ALIGN);
629 } else {
630 T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
631 env->reserve = (uint64_t)T0;
633 RETURN();
636 void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
638 if (unlikely(T0 & 0x03)) {
639 do_raise_exception(POWERPC_EXCP_ALIGN);
640 } else {
641 T1 = glue(ld64r, MEMSUFFIX)((uint32_t)T0);
642 env->reserve = (uint32_t)T0;
644 RETURN();
647 void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
649 if (unlikely(T0 & 0x03)) {
650 do_raise_exception(POWERPC_EXCP_ALIGN);
651 } else {
652 T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
653 env->reserve = (uint64_t)T0;
655 RETURN();
657 #endif
659 /* Store with reservation */
660 void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
662 if (unlikely(T0 & 0x03)) {
663 do_raise_exception(POWERPC_EXCP_ALIGN);
664 } else {
665 if (unlikely(env->reserve != (uint32_t)T0)) {
666 env->crf[0] = xer_so;
667 } else {
668 glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
669 env->crf[0] = xer_so | 0x02;
672 env->reserve = -1;
673 RETURN();
676 #if defined(TARGET_PPC64)
677 void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
679 if (unlikely(T0 & 0x03)) {
680 do_raise_exception(POWERPC_EXCP_ALIGN);
681 } else {
682 if (unlikely(env->reserve != (uint64_t)T0)) {
683 env->crf[0] = xer_so;
684 } else {
685 glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
686 env->crf[0] = xer_so | 0x02;
689 env->reserve = -1;
690 RETURN();
693 void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
695 if (unlikely(T0 & 0x03)) {
696 do_raise_exception(POWERPC_EXCP_ALIGN);
697 } else {
698 if (unlikely(env->reserve != (uint32_t)T0)) {
699 env->crf[0] = xer_so;
700 } else {
701 glue(stq, MEMSUFFIX)((uint32_t)T0, T1);
702 env->crf[0] = xer_so | 0x02;
705 env->reserve = -1;
706 RETURN();
709 void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
711 if (unlikely(T0 & 0x03)) {
712 do_raise_exception(POWERPC_EXCP_ALIGN);
713 } else {
714 if (unlikely(env->reserve != (uint64_t)T0)) {
715 env->crf[0] = xer_so;
716 } else {
717 glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
718 env->crf[0] = xer_so | 0x02;
721 env->reserve = -1;
722 RETURN();
724 #endif
726 void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
728 if (unlikely(T0 & 0x03)) {
729 do_raise_exception(POWERPC_EXCP_ALIGN);
730 } else {
731 if (unlikely(env->reserve != (uint32_t)T0)) {
732 env->crf[0] = xer_so;
733 } else {
734 glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
735 env->crf[0] = xer_so | 0x02;
738 env->reserve = -1;
739 RETURN();
742 #if defined(TARGET_PPC64)
743 void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
745 if (unlikely(T0 & 0x03)) {
746 do_raise_exception(POWERPC_EXCP_ALIGN);
747 } else {
748 if (unlikely(env->reserve != (uint64_t)T0)) {
749 env->crf[0] = xer_so;
750 } else {
751 glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
752 env->crf[0] = xer_so | 0x02;
755 env->reserve = -1;
756 RETURN();
759 void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
761 if (unlikely(T0 & 0x03)) {
762 do_raise_exception(POWERPC_EXCP_ALIGN);
763 } else {
764 if (unlikely(env->reserve != (uint32_t)T0)) {
765 env->crf[0] = xer_so;
766 } else {
767 glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
768 env->crf[0] = xer_so | 0x02;
771 env->reserve = -1;
772 RETURN();
775 void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
777 if (unlikely(T0 & 0x03)) {
778 do_raise_exception(POWERPC_EXCP_ALIGN);
779 } else {
780 if (unlikely(env->reserve != (uint64_t)T0)) {
781 env->crf[0] = xer_so;
782 } else {
783 glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
784 env->crf[0] = xer_so | 0x02;
787 env->reserve = -1;
788 RETURN();
790 #endif
792 void OPPROTO glue(op_dcbz_l32, MEMSUFFIX) (void)
794 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
795 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
796 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
797 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
798 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
799 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
800 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
801 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
802 RETURN();
805 void OPPROTO glue(op_dcbz_l64, MEMSUFFIX) (void)
807 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
808 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
809 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
810 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
811 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
812 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
813 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
814 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
815 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
816 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
817 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
818 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
819 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
820 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
821 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
822 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
823 RETURN();
826 void OPPROTO glue(op_dcbz_l128, MEMSUFFIX) (void)
828 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
829 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
830 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
831 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
832 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
833 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
834 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
835 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
836 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
837 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
838 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
839 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
840 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
841 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
842 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
843 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
844 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x40UL), 0);
845 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x44UL), 0);
846 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x48UL), 0);
847 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x4CUL), 0);
848 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x50UL), 0);
849 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x54UL), 0);
850 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x58UL), 0);
851 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x5CUL), 0);
852 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x60UL), 0);
853 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x64UL), 0);
854 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x68UL), 0);
855 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x6CUL), 0);
856 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x70UL), 0);
857 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x74UL), 0);
858 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x78UL), 0);
859 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x7CUL), 0);
860 RETURN();
863 void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
865 glue(do_dcbz, MEMSUFFIX)();
866 RETURN();
#if defined(TARGET_PPC64)
/* 64-bit EA variants of the dcbz micro-ops. */
void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x40UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x44UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x48UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x4CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x50UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x54UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x58UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x5CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x60UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x64UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x68UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x6CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x70UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x74UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x78UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x7CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(do_dcbz_64, MEMSUFFIX)();
    RETURN();
}
#endif
948 /* Instruction cache block invalidate */
949 void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
951 glue(do_icbi, MEMSUFFIX)();
952 RETURN();
955 #if defined(TARGET_PPC64)
956 void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
958 glue(do_icbi_64, MEMSUFFIX)();
959 RETURN();
961 #endif
963 /* External access */
964 void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
966 T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
967 RETURN();
970 #if defined(TARGET_PPC64)
971 void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
973 T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
974 RETURN();
976 #endif
978 void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
980 glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
981 RETURN();
984 #if defined(TARGET_PPC64)
985 void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
987 glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
988 RETURN();
990 #endif
992 void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
994 T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
995 RETURN();
998 #if defined(TARGET_PPC64)
999 void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
1001 T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
1002 RETURN();
1004 #endif
1006 void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
1008 glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
1009 RETURN();
1012 #if defined(TARGET_PPC64)
1013 void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
1015 glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
1016 RETURN();
1018 #endif
1020 /* XXX: those micro-ops need tests ! */
1021 /* PowerPC 601 specific instructions (POWER bridge) */
1022 void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
1024 /* When byte count is 0, do nothing */
1025 if (likely(T1 != 0)) {
1026 glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
1028 RETURN();
1031 /* POWER2 quad load and store */
1032 /* XXX: TAGs are not managed */
1033 void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
1035 glue(do_POWER2_lfq, MEMSUFFIX)();
1036 RETURN();
1039 void glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
1041 glue(do_POWER2_lfq_le, MEMSUFFIX)();
1042 RETURN();
1045 void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
1047 glue(do_POWER2_stfq, MEMSUFFIX)();
1048 RETURN();
1051 void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
1053 glue(do_POWER2_stfq_le, MEMSUFFIX)();
1054 RETURN();
1057 /* Altivec vector extension */
1058 #if defined(WORDS_BIGENDIAN)
1059 #define VR_DWORD0 0
1060 #define VR_DWORD1 1
1061 #else
1062 #define VR_DWORD0 1
1063 #define VR_DWORD1 0
1064 #endif
1065 void OPPROTO glue(op_vr_lvx, MEMSUFFIX) (void)
1067 AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint32_t)T0);
1068 AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint32_t)T0 + 8);
1071 void OPPROTO glue(op_vr_lvx_le, MEMSUFFIX) (void)
1073 AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint32_t)T0);
1074 AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint32_t)T0 + 8);
1077 void OPPROTO glue(op_vr_stvx, MEMSUFFIX) (void)
1079 glue(stq, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD0]);
1080 glue(stq, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD1]);
1083 void OPPROTO glue(op_vr_stvx_le, MEMSUFFIX) (void)
1085 glue(stq, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD1]);
1086 glue(stq, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD0]);
1089 #if defined(TARGET_PPC64)
1090 void OPPROTO glue(op_vr_lvx_64, MEMSUFFIX) (void)
1092 AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint64_t)T0);
1093 AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint64_t)T0 + 8);
1096 void OPPROTO glue(op_vr_lvx_le_64, MEMSUFFIX) (void)
1098 AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint64_t)T0);
1099 AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint64_t)T0 + 8);
1102 void OPPROTO glue(op_vr_stvx_64, MEMSUFFIX) (void)
1104 glue(stq, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD0]);
1105 glue(stq, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD1]);
1108 void OPPROTO glue(op_vr_stvx_le_64, MEMSUFFIX) (void)
1110 glue(stq, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD1]);
1111 glue(stq, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD0]);
1113 #endif
1114 #undef VR_DWORD0
1115 #undef VR_DWORD1
1117 #if defined(TARGET_PPCEMB)
1118 /* SPE extension */
1119 #define _PPC_SPE_LD_OP(name, op) \
1120 void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void) \
1122 T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0); \
1123 RETURN(); \
1126 #if defined(TARGET_PPC64)
1127 #define _PPC_SPE_LD_OP_64(name, op) \
1128 void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void) \
1130 T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0); \
1131 RETURN(); \
1133 #define PPC_SPE_LD_OP(name, op) \
1134 _PPC_SPE_LD_OP(name, op); \
1135 _PPC_SPE_LD_OP_64(name, op)
1136 #else
1137 #define PPC_SPE_LD_OP(name, op) \
1138 _PPC_SPE_LD_OP(name, op)
1139 #endif
1141 #define _PPC_SPE_ST_OP(name, op) \
1142 void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void) \
1144 glue(op, MEMSUFFIX)((uint32_t)T0, T1_64); \
1145 RETURN(); \
1148 #if defined(TARGET_PPC64)
1149 #define _PPC_SPE_ST_OP_64(name, op) \
1150 void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void) \
1152 glue(op, MEMSUFFIX)((uint64_t)T0, T1_64); \
1153 RETURN(); \
1155 #define PPC_SPE_ST_OP(name, op) \
1156 _PPC_SPE_ST_OP(name, op); \
1157 _PPC_SPE_ST_OP_64(name, op)
1158 #else
1159 #define PPC_SPE_ST_OP(name, op) \
1160 _PPC_SPE_ST_OP(name, op)
1161 #endif
1163 #if !defined(TARGET_PPC64)
1164 PPC_SPE_LD_OP(dd, ldq);
1165 PPC_SPE_ST_OP(dd, stq);
1166 PPC_SPE_LD_OP(dd_le, ld64r);
1167 PPC_SPE_ST_OP(dd_le, st64r);
1168 #endif
1169 static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
1171 uint64_t ret;
1172 ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
1173 ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);
1174 return ret;
1176 PPC_SPE_LD_OP(dw, spe_ldw);
1177 static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA,
1178 uint64_t data)
1180 glue(stl, MEMSUFFIX)(EA, data >> 32);
1181 glue(stl, MEMSUFFIX)(EA + 4, data);
1183 PPC_SPE_ST_OP(dw, spe_stdw);
1184 static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
1186 uint64_t ret;
1187 ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
1188 ret |= (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);
1189 return ret;
1191 PPC_SPE_LD_OP(dw_le, spe_ldw_le);
1192 static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
1193 uint64_t data)
1195 glue(st32r, MEMSUFFIX)(EA, data >> 32);
1196 glue(st32r, MEMSUFFIX)(EA + 4, data);
1198 PPC_SPE_ST_OP(dw_le, spe_stdw_le);
1199 static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
1201 uint64_t ret;
1202 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
1203 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 32;
1204 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 4) << 16;
1205 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 6);
1206 return ret;
1208 PPC_SPE_LD_OP(dh, spe_ldh);
1209 static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA,
1210 uint64_t data)
1212 glue(stw, MEMSUFFIX)(EA, data >> 48);
1213 glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
1214 glue(stw, MEMSUFFIX)(EA + 4, data >> 16);
1215 glue(stw, MEMSUFFIX)(EA + 6, data);
1217 PPC_SPE_ST_OP(dh, spe_stdh);
1218 static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
1220 uint64_t ret;
1221 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
1222 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 32;
1223 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 4) << 16;
1224 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 6);
1225 return ret;
1227 PPC_SPE_LD_OP(dh_le, spe_ldh_le);
1228 static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
1229 uint64_t data)
1231 glue(st16r, MEMSUFFIX)(EA, data >> 48);
1232 glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
1233 glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
1234 glue(st16r, MEMSUFFIX)(EA + 6, data);
1236 PPC_SPE_ST_OP(dh_le, spe_stdh_le);
1237 static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
1239 uint64_t ret;
1240 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
1241 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 16;
1242 return ret;
1244 PPC_SPE_LD_OP(whe, spe_lwhe);
1245 static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA,
1246 uint64_t data)
1248 glue(stw, MEMSUFFIX)(EA, data >> 48);
1249 glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
1251 PPC_SPE_ST_OP(whe, spe_stwhe);
1252 static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
1254 uint64_t ret;
1255 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
1256 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 16;
1257 return ret;
1259 PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
1260 static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
1261 uint64_t data)
1263 glue(st16r, MEMSUFFIX)(EA, data >> 48);
1264 glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
1266 PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
1267 static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
1269 uint64_t ret;
1270 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
1271 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2);
1272 return ret;
1274 PPC_SPE_LD_OP(whou, spe_lwhou);
1275 static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
1277 uint64_t ret;
1278 ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
1279 ret |= (uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA + 2));
1280 return ret;
1282 PPC_SPE_LD_OP(whos, spe_lwhos);
1283 static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA,
1284 uint64_t data)
1286 glue(stw, MEMSUFFIX)(EA, data >> 32);
1287 glue(stw, MEMSUFFIX)(EA + 2, data);
1289 PPC_SPE_ST_OP(who, spe_stwho);
1290 static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
1292 uint64_t ret;
1293 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
1294 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2);
1295 return ret;
1297 PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
1298 static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
1300 uint64_t ret;
1301 ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
1302 ret |= (uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA + 2));
1303 return ret;
1305 PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
1306 static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
1307 uint64_t data)
1309 glue(st16r, MEMSUFFIX)(EA, data >> 32);
1310 glue(st16r, MEMSUFFIX)(EA + 2, data);
1312 PPC_SPE_ST_OP(who_le, spe_stwho_le);
1313 #if !defined(TARGET_PPC64)
1314 static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA,
1315 uint64_t data)
1317 glue(stl, MEMSUFFIX)(EA, data);
1319 PPC_SPE_ST_OP(wwo, spe_stwwo);
1320 static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
1321 uint64_t data)
1323 glue(st32r, MEMSUFFIX)(EA, data);
1325 PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
1326 #endif
1327 static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
1329 uint16_t tmp;
1330 tmp = glue(lduw, MEMSUFFIX)(EA);
1331 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1333 PPC_SPE_LD_OP(h, spe_lh);
1334 static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
1336 uint16_t tmp;
1337 tmp = glue(ld16r, MEMSUFFIX)(EA);
1338 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1340 PPC_SPE_LD_OP(h_le, spe_lh_le);
1341 static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
1343 uint32_t tmp;
1344 tmp = glue(ldl, MEMSUFFIX)(EA);
1345 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1347 PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
1348 static always_inline
1349 uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
1351 uint32_t tmp;
1352 tmp = glue(ld32r, MEMSUFFIX)(EA);
1353 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1355 PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
1356 static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
1358 uint64_t ret;
1359 uint16_t tmp;
1360 tmp = glue(lduw, MEMSUFFIX)(EA);
1361 ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1362 tmp = glue(lduw, MEMSUFFIX)(EA + 2);
1363 ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1364 return ret;
1366 PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
1367 static always_inline
1368 uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
1370 uint64_t ret;
1371 uint16_t tmp;
1372 tmp = glue(ld16r, MEMSUFFIX)(EA);
1373 ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1374 tmp = glue(ld16r, MEMSUFFIX)(EA + 2);
1375 ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1376 return ret;
1378 PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
1379 #endif /* defined(TARGET_PPCEMB) */
1381 #undef MEMSUFFIX