Change -drive parsing so that paths don't have to be double-escaped (Laurent Vivier...
[qemu/qemu_0_9_1_stable.git] / target-ppc / op_mem.h
blob16dd4ceebf416e527bcb6330743b10d01d746415
/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

#include "op_mem_access.h"
/*** Integer load ***/
/* Emit an integer load micro-op: load via access function 'op' from the
 * effective address in T0 (truncated to 32 bits) into T1. */
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same as PPC_LD_OP but using the full 64-bit effective address in T0. */
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif
/* Emit an integer store micro-op: store T1 via access function 'op' at the
 * effective address in T0 (truncated to 32 bits). */
#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same as PPC_ST_OP but using the full 64-bit effective address in T0. */
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif
/* Big-endian (native) integer loads. */
PPC_LD_OP(bz, ldu8);
PPC_LD_OP(ha, lds16);
PPC_LD_OP(hz, ldu16);
PPC_LD_OP(wz, ldu32);
#if defined(TARGET_PPC64)
PPC_LD_OP(wa, lds32);
PPC_LD_OP(d, ldu64);
PPC_LD_OP_64(bz, ldu8);
PPC_LD_OP_64(ha, lds16);
PPC_LD_OP_64(hz, ldu16);
PPC_LD_OP_64(wz, ldu32);
PPC_LD_OP_64(wa, lds32);
PPC_LD_OP_64(d, ldu64);
#endif

/* Little-endian (byte-reversed) integer loads. */
PPC_LD_OP(ha_le, lds16r);
PPC_LD_OP(hz_le, ldu16r);
PPC_LD_OP(wz_le, ldu32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(wa_le, lds32r);
PPC_LD_OP(d_le, ldu64r);
PPC_LD_OP_64(ha_le, lds16r);
PPC_LD_OP_64(hz_le, ldu16r);
PPC_LD_OP_64(wz_le, ldu32r);
PPC_LD_OP_64(wa_le, lds32r);
PPC_LD_OP_64(d_le, ldu64r);
#endif
/*** Integer store ***/
PPC_ST_OP(b, st8);
PPC_ST_OP(h, st16);
PPC_ST_OP(w, st32);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, st64);
PPC_ST_OP_64(b, st8);
PPC_ST_OP_64(h, st16);
PPC_ST_OP_64(w, st32);
PPC_ST_OP_64(d, st64);
#endif

/* Little-endian (byte-reversed) integer stores. */
PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
PPC_ST_OP_64(d_le, st64r);
#endif
/*** Integer load and store with byte reverse ***/
/* lhbrx/lwbrx/sthbrx/stwbrx: in big-endian mode these reverse bytes, in
 * little-endian mode the reversal cancels out and plain accesses are used. */
PPC_LD_OP(hbr, ldu16r);
PPC_LD_OP(wbr, ldu32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ldu16r);
PPC_LD_OP_64(wbr, ldu32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

PPC_LD_OP(hbr_le, ldu16);
PPC_LD_OP(wbr_le, ldu32);
PPC_ST_OP(hbr_le, st16);
PPC_ST_OP(wbr_le, st32);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, ldu16);
PPC_LD_OP_64(wbr_le, ldu32);
PPC_ST_OP_64(hbr_le, st16);
PPC_ST_OP_64(wbr_le, st32);
#endif
128 /*** Integer load and store multiple ***/
129 void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
131 glue(do_lmw, MEMSUFFIX)(PARAM1);
132 RETURN();
135 #if defined(TARGET_PPC64)
136 void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
138 glue(do_lmw_64, MEMSUFFIX)(PARAM1);
139 RETURN();
141 #endif
143 void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
145 glue(do_lmw_le, MEMSUFFIX)(PARAM1);
146 RETURN();
149 #if defined(TARGET_PPC64)
150 void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
152 glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
153 RETURN();
155 #endif
157 void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
159 glue(do_stmw, MEMSUFFIX)(PARAM1);
160 RETURN();
163 #if defined(TARGET_PPC64)
164 void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
166 glue(do_stmw_64, MEMSUFFIX)(PARAM1);
167 RETURN();
169 #endif
171 void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
173 glue(do_stmw_le, MEMSUFFIX)(PARAM1);
174 RETURN();
177 #if defined(TARGET_PPC64)
178 void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
180 glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
181 RETURN();
183 #endif
185 /*** Integer load and store strings ***/
186 void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
188 glue(do_lsw, MEMSUFFIX)(PARAM1);
189 RETURN();
192 #if defined(TARGET_PPC64)
193 void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
195 glue(do_lsw_64, MEMSUFFIX)(PARAM1);
196 RETURN();
198 #endif
200 /* PPC32 specification says we must generate an exception if
201 * rA is in the range of registers to be loaded.
202 * In an other hand, IBM says this is valid, but rA won't be loaded.
203 * For now, I'll follow the spec...
205 void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
207 /* Note: T1 comes from xer_bc then no cast is needed */
208 if (likely(T1 != 0)) {
209 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
210 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
211 do_raise_exception_err(POWERPC_EXCP_PROGRAM,
212 POWERPC_EXCP_INVAL |
213 POWERPC_EXCP_INVAL_LSWX);
214 } else {
215 glue(do_lsw, MEMSUFFIX)(PARAM1);
218 RETURN();
221 #if defined(TARGET_PPC64)
222 void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
224 /* Note: T1 comes from xer_bc then no cast is needed */
225 if (likely(T1 != 0)) {
226 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
227 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
228 do_raise_exception_err(POWERPC_EXCP_PROGRAM,
229 POWERPC_EXCP_INVAL |
230 POWERPC_EXCP_INVAL_LSWX);
231 } else {
232 glue(do_lsw_64, MEMSUFFIX)(PARAM1);
235 RETURN();
237 #endif
239 void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
241 glue(do_stsw, MEMSUFFIX)(PARAM1);
242 RETURN();
245 #if defined(TARGET_PPC64)
246 void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
248 glue(do_stsw_64, MEMSUFFIX)(PARAM1);
249 RETURN();
251 #endif
/*** Floating-point store ***/
/* Emit a floating-point store micro-op: store FT0 via access function 'op'
 * at the effective address in T0 (truncated to 32 bits). */
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
/* Same as PPC_STF_OP but using the full 64-bit effective address in T0. */
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif
270 static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
272 glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
275 #if defined(WORDS_BIGENDIAN)
276 #define WORD0 0
277 #define WORD1 1
278 #else
279 #define WORD0 1
280 #define WORD1 0
281 #endif
282 static always_inline void glue(stfiw, MEMSUFFIX) (target_ulong EA, double d)
284 union {
285 double d;
286 uint32_t u[2];
287 } u;
289 /* Store the low order 32 bits without any conversion */
290 u.d = d;
291 glue(st32, MEMSUFFIX)(EA, u.u[WORD0]);
293 #undef WORD0
294 #undef WORD1
296 PPC_STF_OP(fd, stfq);
297 PPC_STF_OP(fs, stfs);
298 PPC_STF_OP(fiw, stfiw);
299 #if defined(TARGET_PPC64)
300 PPC_STF_OP_64(fd, stfq);
301 PPC_STF_OP_64(fs, stfs);
302 PPC_STF_OP_64(fiw, stfiw);
303 #endif
305 static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
307 union {
308 double d;
309 uint64_t u;
310 } u;
312 u.d = d;
313 u.u = bswap64(u.u);
314 glue(stfq, MEMSUFFIX)(EA, u.d);
317 static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
319 union {
320 float f;
321 uint32_t u;
322 } u;
324 u.f = float64_to_float32(d, &env->fp_status);
325 u.u = bswap32(u.u);
326 glue(stfl, MEMSUFFIX)(EA, u.f);
329 static always_inline void glue(stfiwr, MEMSUFFIX) (target_ulong EA, double d)
331 union {
332 double d;
333 uint64_t u;
334 } u;
336 /* Store the low order 32 bits without any conversion */
337 u.d = d;
338 u.u = bswap32(u.u);
339 glue(st32, MEMSUFFIX)(EA, u.u);
342 PPC_STF_OP(fd_le, stfqr);
343 PPC_STF_OP(fs_le, stfsr);
344 PPC_STF_OP(fiw_le, stfiwr);
345 #if defined(TARGET_PPC64)
346 PPC_STF_OP_64(fd_le, stfqr);
347 PPC_STF_OP_64(fs_le, stfsr);
348 PPC_STF_OP_64(fiw_le, stfiwr);
349 #endif
351 /*** Floating-point load ***/
352 #define PPC_LDF_OP(name, op) \
353 void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void) \
355 FT0 = glue(op, MEMSUFFIX)((uint32_t)T0); \
356 RETURN(); \
359 #if defined(TARGET_PPC64)
360 #define PPC_LDF_OP_64(name, op) \
361 void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \
363 FT0 = glue(op, MEMSUFFIX)((uint64_t)T0); \
364 RETURN(); \
366 #endif
368 static always_inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
370 return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
373 PPC_LDF_OP(fd, ldfq);
374 PPC_LDF_OP(fs, ldfs);
375 #if defined(TARGET_PPC64)
376 PPC_LDF_OP_64(fd, ldfq);
377 PPC_LDF_OP_64(fs, ldfs);
378 #endif
380 static always_inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
382 union {
383 double d;
384 uint64_t u;
385 } u;
387 u.d = glue(ldfq, MEMSUFFIX)(EA);
388 u.u = bswap64(u.u);
390 return u.d;
393 static always_inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
395 union {
396 float f;
397 uint32_t u;
398 } u;
400 u.f = glue(ldfl, MEMSUFFIX)(EA);
401 u.u = bswap32(u.u);
403 return float32_to_float64(u.f, &env->fp_status);
406 PPC_LDF_OP(fd_le, ldfqr);
407 PPC_LDF_OP(fs_le, ldfsr);
408 #if defined(TARGET_PPC64)
409 PPC_LDF_OP_64(fd_le, ldfqr);
410 PPC_LDF_OP_64(fs_le, ldfsr);
411 #endif
413 /* Load and set reservation */
414 void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
416 if (unlikely(T0 & 0x03)) {
417 do_raise_exception(POWERPC_EXCP_ALIGN);
418 } else {
419 T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
420 env->reserve = (uint32_t)T0;
422 RETURN();
425 #if defined(TARGET_PPC64)
426 void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
428 if (unlikely(T0 & 0x03)) {
429 do_raise_exception(POWERPC_EXCP_ALIGN);
430 } else {
431 T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);
432 env->reserve = (uint64_t)T0;
434 RETURN();
437 void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
439 if (unlikely(T0 & 0x03)) {
440 do_raise_exception(POWERPC_EXCP_ALIGN);
441 } else {
442 T1 = glue(ldu64, MEMSUFFIX)((uint32_t)T0);
443 env->reserve = (uint32_t)T0;
445 RETURN();
448 void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
450 if (unlikely(T0 & 0x03)) {
451 do_raise_exception(POWERPC_EXCP_ALIGN);
452 } else {
453 T1 = glue(ldu64, MEMSUFFIX)((uint64_t)T0);
454 env->reserve = (uint64_t)T0;
456 RETURN();
458 #endif
460 void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
462 if (unlikely(T0 & 0x03)) {
463 do_raise_exception(POWERPC_EXCP_ALIGN);
464 } else {
465 T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);
466 env->reserve = (uint32_t)T0;
468 RETURN();
471 #if defined(TARGET_PPC64)
472 void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
474 if (unlikely(T0 & 0x03)) {
475 do_raise_exception(POWERPC_EXCP_ALIGN);
476 } else {
477 T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);
478 env->reserve = (uint64_t)T0;
480 RETURN();
483 void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
485 if (unlikely(T0 & 0x03)) {
486 do_raise_exception(POWERPC_EXCP_ALIGN);
487 } else {
488 T1 = glue(ldu64r, MEMSUFFIX)((uint32_t)T0);
489 env->reserve = (uint32_t)T0;
491 RETURN();
494 void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
496 if (unlikely(T0 & 0x03)) {
497 do_raise_exception(POWERPC_EXCP_ALIGN);
498 } else {
499 T1 = glue(ldu64r, MEMSUFFIX)((uint64_t)T0);
500 env->reserve = (uint64_t)T0;
502 RETURN();
504 #endif
506 /* Store with reservation */
507 void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
509 if (unlikely(T0 & 0x03)) {
510 do_raise_exception(POWERPC_EXCP_ALIGN);
511 } else {
512 if (unlikely(env->reserve != (uint32_t)T0)) {
513 env->crf[0] = xer_so;
514 } else {
515 glue(st32, MEMSUFFIX)((uint32_t)T0, T1);
516 env->crf[0] = xer_so | 0x02;
519 env->reserve = (target_ulong)-1ULL;
520 RETURN();
523 #if defined(TARGET_PPC64)
524 void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
526 if (unlikely(T0 & 0x03)) {
527 do_raise_exception(POWERPC_EXCP_ALIGN);
528 } else {
529 if (unlikely(env->reserve != (uint64_t)T0)) {
530 env->crf[0] = xer_so;
531 } else {
532 glue(st32, MEMSUFFIX)((uint64_t)T0, T1);
533 env->crf[0] = xer_so | 0x02;
536 env->reserve = (target_ulong)-1ULL;
537 RETURN();
540 void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
542 if (unlikely(T0 & 0x03)) {
543 do_raise_exception(POWERPC_EXCP_ALIGN);
544 } else {
545 if (unlikely(env->reserve != (uint32_t)T0)) {
546 env->crf[0] = xer_so;
547 } else {
548 glue(st64, MEMSUFFIX)((uint32_t)T0, T1);
549 env->crf[0] = xer_so | 0x02;
552 env->reserve = (target_ulong)-1ULL;
553 RETURN();
556 void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
558 if (unlikely(T0 & 0x03)) {
559 do_raise_exception(POWERPC_EXCP_ALIGN);
560 } else {
561 if (unlikely(env->reserve != (uint64_t)T0)) {
562 env->crf[0] = xer_so;
563 } else {
564 glue(st64, MEMSUFFIX)((uint64_t)T0, T1);
565 env->crf[0] = xer_so | 0x02;
568 env->reserve = (target_ulong)-1ULL;
569 RETURN();
571 #endif
573 void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
575 if (unlikely(T0 & 0x03)) {
576 do_raise_exception(POWERPC_EXCP_ALIGN);
577 } else {
578 if (unlikely(env->reserve != (uint32_t)T0)) {
579 env->crf[0] = xer_so;
580 } else {
581 glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
582 env->crf[0] = xer_so | 0x02;
585 env->reserve = (target_ulong)-1ULL;
586 RETURN();
589 #if defined(TARGET_PPC64)
590 void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
592 if (unlikely(T0 & 0x03)) {
593 do_raise_exception(POWERPC_EXCP_ALIGN);
594 } else {
595 if (unlikely(env->reserve != (uint64_t)T0)) {
596 env->crf[0] = xer_so;
597 } else {
598 glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
599 env->crf[0] = xer_so | 0x02;
602 env->reserve = (target_ulong)-1ULL;
603 RETURN();
606 void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
608 if (unlikely(T0 & 0x03)) {
609 do_raise_exception(POWERPC_EXCP_ALIGN);
610 } else {
611 if (unlikely(env->reserve != (uint32_t)T0)) {
612 env->crf[0] = xer_so;
613 } else {
614 glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
615 env->crf[0] = xer_so | 0x02;
618 env->reserve = (target_ulong)-1ULL;
619 RETURN();
622 void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
624 if (unlikely(T0 & 0x03)) {
625 do_raise_exception(POWERPC_EXCP_ALIGN);
626 } else {
627 if (unlikely(env->reserve != (uint64_t)T0)) {
628 env->crf[0] = xer_so;
629 } else {
630 glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
631 env->crf[0] = xer_so | 0x02;
634 env->reserve = (target_ulong)-1ULL;
635 RETURN();
637 #endif
639 void OPPROTO glue(op_dcbz_l32, MEMSUFFIX) (void)
641 T0 &= ~((uint32_t)31);
642 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
643 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
644 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
645 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
646 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
647 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
648 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
649 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
650 RETURN();
653 void OPPROTO glue(op_dcbz_l64, MEMSUFFIX) (void)
655 T0 &= ~((uint32_t)63);
656 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
657 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
658 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
659 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
660 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
661 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
662 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
663 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
664 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
665 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
666 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
667 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
668 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
669 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
670 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
671 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
672 RETURN();
675 void OPPROTO glue(op_dcbz_l128, MEMSUFFIX) (void)
677 T0 &= ~((uint32_t)127);
678 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
679 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
680 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
681 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
682 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
683 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
684 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
685 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
686 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
687 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
688 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
689 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
690 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
691 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
692 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
693 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
694 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x40UL), 0);
695 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x44UL), 0);
696 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x48UL), 0);
697 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x4CUL), 0);
698 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x50UL), 0);
699 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x54UL), 0);
700 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x58UL), 0);
701 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x5CUL), 0);
702 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x60UL), 0);
703 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x64UL), 0);
704 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x68UL), 0);
705 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x6CUL), 0);
706 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x70UL), 0);
707 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x74UL), 0);
708 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x78UL), 0);
709 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x7CUL), 0);
710 RETURN();
713 void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
715 glue(do_dcbz, MEMSUFFIX)();
716 RETURN();
#if defined(TARGET_PPC64)
/* 64-bit effective-address variants of the dcbz micro-ops. */
void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)31);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)63);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)127);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x40UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x44UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x48UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x4CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x50UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x54UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x58UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x5CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x60UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x64UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x68UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x6CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x70UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x74UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x78UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x7CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(do_dcbz_64, MEMSUFFIX)();
    RETURN();
}
#endif
801 /* Instruction cache block invalidate */
802 void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
804 glue(do_icbi, MEMSUFFIX)();
805 RETURN();
808 #if defined(TARGET_PPC64)
809 void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
811 glue(do_icbi_64, MEMSUFFIX)();
812 RETURN();
814 #endif
816 /* External access */
817 void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
819 T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
820 RETURN();
823 #if defined(TARGET_PPC64)
824 void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
826 T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);
827 RETURN();
829 #endif
831 void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
833 glue(st32, MEMSUFFIX)((uint32_t)T0, T1);
834 RETURN();
837 #if defined(TARGET_PPC64)
838 void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
840 glue(st32, MEMSUFFIX)((uint64_t)T0, T1);
841 RETURN();
843 #endif
845 void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
847 T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);
848 RETURN();
851 #if defined(TARGET_PPC64)
852 void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
854 T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);
855 RETURN();
857 #endif
859 void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
861 glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
862 RETURN();
865 #if defined(TARGET_PPC64)
866 void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
868 glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
869 RETURN();
871 #endif
873 /* XXX: those micro-ops need tests ! */
874 /* PowerPC 601 specific instructions (POWER bridge) */
875 void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
877 /* When byte count is 0, do nothing */
878 if (likely(T1 != 0)) {
879 glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
881 RETURN();
884 /* POWER2 quad load and store */
885 /* XXX: TAGs are not managed */
886 void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
888 glue(do_POWER2_lfq, MEMSUFFIX)();
889 RETURN();
892 void glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
894 glue(do_POWER2_lfq_le, MEMSUFFIX)();
895 RETURN();
898 void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
900 glue(do_POWER2_stfq, MEMSUFFIX)();
901 RETURN();
904 void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
906 glue(do_POWER2_stfq_le, MEMSUFFIX)();
907 RETURN();
910 /* Altivec vector extension */
911 #if defined(WORDS_BIGENDIAN)
912 #define VR_DWORD0 0
913 #define VR_DWORD1 1
914 #else
915 #define VR_DWORD0 1
916 #define VR_DWORD1 0
917 #endif
918 void OPPROTO glue(op_vr_lvx, MEMSUFFIX) (void)
920 AVR0.u64[VR_DWORD0] = glue(ldu64, MEMSUFFIX)((uint32_t)T0);
921 AVR0.u64[VR_DWORD1] = glue(ldu64, MEMSUFFIX)((uint32_t)T0 + 8);
924 void OPPROTO glue(op_vr_lvx_le, MEMSUFFIX) (void)
926 AVR0.u64[VR_DWORD1] = glue(ldu64r, MEMSUFFIX)((uint32_t)T0);
927 AVR0.u64[VR_DWORD0] = glue(ldu64r, MEMSUFFIX)((uint32_t)T0 + 8);
930 void OPPROTO glue(op_vr_stvx, MEMSUFFIX) (void)
932 glue(st64, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD0]);
933 glue(st64, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD1]);
936 void OPPROTO glue(op_vr_stvx_le, MEMSUFFIX) (void)
938 glue(st64r, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD1]);
939 glue(st64r, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD0]);
942 #if defined(TARGET_PPC64)
943 void OPPROTO glue(op_vr_lvx_64, MEMSUFFIX) (void)
945 AVR0.u64[VR_DWORD0] = glue(ldu64, MEMSUFFIX)((uint64_t)T0);
946 AVR0.u64[VR_DWORD1] = glue(ldu64, MEMSUFFIX)((uint64_t)T0 + 8);
949 void OPPROTO glue(op_vr_lvx_le_64, MEMSUFFIX) (void)
951 AVR0.u64[VR_DWORD1] = glue(ldu64r, MEMSUFFIX)((uint64_t)T0);
952 AVR0.u64[VR_DWORD0] = glue(ldu64r, MEMSUFFIX)((uint64_t)T0 + 8);
955 void OPPROTO glue(op_vr_stvx_64, MEMSUFFIX) (void)
957 glue(st64, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD0]);
958 glue(st64, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD1]);
961 void OPPROTO glue(op_vr_stvx_le_64, MEMSUFFIX) (void)
963 glue(st64r, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD1]);
964 glue(st64r, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD0]);
966 #endif
967 #undef VR_DWORD0
968 #undef VR_DWORD1
/* SPE extension */
/* SPE load micro-op: load a 64-bit value into T1_64 from the 32-bit
 * effective address in T0. */
#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RETURN();                                                                 \
}

#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif

/* SPE store micro-op: store the 64-bit value in T1_64 to the 32-bit
 * effective address in T0. */
#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif
1015 #if !defined(TARGET_PPC64)
1016 PPC_SPE_LD_OP(dd, ldu64);
1017 PPC_SPE_ST_OP(dd, st64);
1018 PPC_SPE_LD_OP(dd_le, ldu64r);
1019 PPC_SPE_ST_OP(dd_le, st64r);
1020 #endif
1021 static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
1023 uint64_t ret;
1024 ret = (uint64_t)glue(ldu32, MEMSUFFIX)(EA) << 32;
1025 ret |= (uint64_t)glue(ldu32, MEMSUFFIX)(EA + 4);
1026 return ret;
1028 PPC_SPE_LD_OP(dw, spe_ldw);
1029 static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA,
1030 uint64_t data)
1032 glue(st32, MEMSUFFIX)(EA, data >> 32);
1033 glue(st32, MEMSUFFIX)(EA + 4, data);
1035 PPC_SPE_ST_OP(dw, spe_stdw);
1036 static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
1038 uint64_t ret;
1039 ret = (uint64_t)glue(ldu32r, MEMSUFFIX)(EA) << 32;
1040 ret |= (uint64_t)glue(ldu32r, MEMSUFFIX)(EA + 4);
1041 return ret;
1043 PPC_SPE_LD_OP(dw_le, spe_ldw_le);
1044 static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
1045 uint64_t data)
1047 glue(st32r, MEMSUFFIX)(EA, data >> 32);
1048 glue(st32r, MEMSUFFIX)(EA + 4, data);
1050 PPC_SPE_ST_OP(dw_le, spe_stdw_le);
1051 static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
1053 uint64_t ret;
1054 ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 48;
1055 ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2) << 32;
1056 ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 4) << 16;
1057 ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 6);
1058 return ret;
1060 PPC_SPE_LD_OP(dh, spe_ldh);
1061 static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA,
1062 uint64_t data)
1064 glue(st16, MEMSUFFIX)(EA, data >> 48);
1065 glue(st16, MEMSUFFIX)(EA + 2, data >> 32);
1066 glue(st16, MEMSUFFIX)(EA + 4, data >> 16);
1067 glue(st16, MEMSUFFIX)(EA + 6, data);
1069 PPC_SPE_ST_OP(dh, spe_stdh);
1070 static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
1072 uint64_t ret;
1073 ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 48;
1074 ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2) << 32;
1075 ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 4) << 16;
1076 ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 6);
1077 return ret;
1079 PPC_SPE_LD_OP(dh_le, spe_ldh_le);
1080 static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
1081 uint64_t data)
1083 glue(st16r, MEMSUFFIX)(EA, data >> 48);
1084 glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
1085 glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
1086 glue(st16r, MEMSUFFIX)(EA + 6, data);
1088 PPC_SPE_ST_OP(dh_le, spe_stdh_le);
1089 static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
1091 uint64_t ret;
1092 ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 48;
1093 ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2) << 16;
1094 return ret;
1096 PPC_SPE_LD_OP(whe, spe_lwhe);
1097 static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA,
1098 uint64_t data)
1100 glue(st16, MEMSUFFIX)(EA, data >> 48);
1101 glue(st16, MEMSUFFIX)(EA + 2, data >> 16);
1103 PPC_SPE_ST_OP(whe, spe_stwhe);
1104 static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
1106 uint64_t ret;
1107 ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 48;
1108 ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2) << 16;
1109 return ret;
1111 PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
1112 static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
1113 uint64_t data)
1115 glue(st16r, MEMSUFFIX)(EA, data >> 48);
1116 glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
1118 PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
1119 static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
1121 uint64_t ret;
1122 ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 32;
1123 ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2);
1124 return ret;
1126 PPC_SPE_LD_OP(whou, spe_lwhou);
1127 static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
1129 uint64_t ret;
1130 ret = ((uint64_t)((int32_t)glue(lds16, MEMSUFFIX)(EA))) << 32;
1131 ret |= (uint64_t)((int32_t)glue(lds16, MEMSUFFIX)(EA + 2));
1132 return ret;
1134 PPC_SPE_LD_OP(whos, spe_lwhos);
1135 static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA,
1136 uint64_t data)
1138 glue(st16, MEMSUFFIX)(EA, data >> 32);
1139 glue(st16, MEMSUFFIX)(EA + 2, data);
1141 PPC_SPE_ST_OP(who, spe_stwho);
1142 static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
1144 uint64_t ret;
1145 ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 32;
1146 ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2);
1147 return ret;
1149 PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
1150 static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
1152 uint64_t ret;
1153 ret = ((uint64_t)((int32_t)glue(lds16r, MEMSUFFIX)(EA))) << 32;
1154 ret |= (uint64_t)((int32_t)glue(lds16r, MEMSUFFIX)(EA + 2));
1155 return ret;
1157 PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
1158 static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
1159 uint64_t data)
1161 glue(st16r, MEMSUFFIX)(EA, data >> 32);
1162 glue(st16r, MEMSUFFIX)(EA + 2, data);
1164 PPC_SPE_ST_OP(who_le, spe_stwho_le);
1165 #if !defined(TARGET_PPC64)
1166 static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA,
1167 uint64_t data)
1169 glue(st32, MEMSUFFIX)(EA, data);
1171 PPC_SPE_ST_OP(wwo, spe_stwwo);
1172 static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
1173 uint64_t data)
1175 glue(st32r, MEMSUFFIX)(EA, data);
1177 PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
1178 #endif
1179 static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
1181 uint16_t tmp;
1182 tmp = glue(ldu16, MEMSUFFIX)(EA);
1183 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1185 PPC_SPE_LD_OP(h, spe_lh);
1186 static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
1188 uint16_t tmp;
1189 tmp = glue(ldu16r, MEMSUFFIX)(EA);
1190 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1192 PPC_SPE_LD_OP(h_le, spe_lh_le);
1193 static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
1195 uint32_t tmp;
1196 tmp = glue(ldu32, MEMSUFFIX)(EA);
1197 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1199 PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
1200 static always_inline
1201 uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
1203 uint32_t tmp;
1204 tmp = glue(ldu32r, MEMSUFFIX)(EA);
1205 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1207 PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
1208 static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
1210 uint64_t ret;
1211 uint16_t tmp;
1212 tmp = glue(ldu16, MEMSUFFIX)(EA);
1213 ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1214 tmp = glue(ldu16, MEMSUFFIX)(EA + 2);
1215 ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1216 return ret;
1218 PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
1219 static always_inline
1220 uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
1222 uint64_t ret;
1223 uint16_t tmp;
1224 tmp = glue(ldu16r, MEMSUFFIX)(EA);
1225 ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1226 tmp = glue(ldu16r, MEMSUFFIX)(EA + 2);
1227 ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1228 return ret;
1230 PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
1232 #undef MEMSUFFIX