Fix SS-2 crash
[qemu/mini2440.git] / target-ppc/op_mem.h
/*
 * PowerPC emulation micro-operations for qemu.
 *
 * Copyright (c) 2003-2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

#include "op_mem_access.h"
/*** Integer load ***/
#define PPC_LD_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                        \
{                                                                            \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                \
}

#if defined(TARGET_PPC64)
#define PPC_LD_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)             \
{                                                                            \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                \
}
#endif

#define PPC_ST_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                       \
{                                                                            \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                   \
    RETURN();                                                                \
}

#if defined(TARGET_PPC64)
#define PPC_ST_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)            \
{                                                                            \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                   \
    RETURN();                                                                \
}
#endif
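/*
 * Illustration (not in the original file): with MEMSUFFIX defined as,
 * say, _raw by the including translation unit, PPC_LD_OP(bz, ldu8) below
 * expands to roughly:
 *
 *     void OPPROTO op_lbz_raw (void)
 *     {
 *         T1 = ldu8_raw((uint32_t)T0);
 *         RETURN();
 *     }
 *
 * One micro-op is thus generated per (instruction, access-mode) pair,
 * with the effective address taken from T0 and the loaded value left
 * in T1.
 */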
PPC_LD_OP(bz, ldu8);
PPC_LD_OP(ha, lds16);
PPC_LD_OP(hz, ldu16);
PPC_LD_OP(wz, ldu32);
#if defined(TARGET_PPC64)
PPC_LD_OP(wa, lds32);
PPC_LD_OP(d, ldu64);
PPC_LD_OP_64(bz, ldu8);
PPC_LD_OP_64(ha, lds16);
PPC_LD_OP_64(hz, ldu16);
PPC_LD_OP_64(wz, ldu32);
PPC_LD_OP_64(wa, lds32);
PPC_LD_OP_64(d, ldu64);
#endif

PPC_LD_OP(ha_le, lds16r);
PPC_LD_OP(hz_le, ldu16r);
PPC_LD_OP(wz_le, ldu32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(wa_le, lds32r);
PPC_LD_OP(d_le, ldu64r);
PPC_LD_OP_64(ha_le, lds16r);
PPC_LD_OP_64(hz_le, ldu16r);
PPC_LD_OP_64(wz_le, ldu32r);
PPC_LD_OP_64(wa_le, lds32r);
PPC_LD_OP_64(d_le, ldu64r);
#endif
/*** Integer store ***/
PPC_ST_OP(b, st8);
PPC_ST_OP(h, st16);
PPC_ST_OP(w, st32);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, st64);
PPC_ST_OP_64(b, st8);
PPC_ST_OP_64(h, st16);
PPC_ST_OP_64(w, st32);
PPC_ST_OP_64(d, st64);
#endif

PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
PPC_ST_OP_64(d_le, st64r);
#endif

/*** Integer load and store with byte reverse ***/
PPC_LD_OP(hbr, ldu16r);
PPC_LD_OP(wbr, ldu32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ldu16r);
PPC_LD_OP_64(wbr, ldu32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

PPC_LD_OP(hbr_le, ldu16);
PPC_LD_OP(wbr_le, ldu32);
PPC_ST_OP(hbr_le, st16);
PPC_ST_OP(wbr_le, st32);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, ldu16);
PPC_LD_OP_64(wbr_le, ldu32);
PPC_ST_OP_64(hbr_le, st16);
PPC_ST_OP_64(wbr_le, st32);
#endif
/*** Integer load and store multiple ***/
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
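/*
 * Sketch (assumption, not part of this file): the do_lmw/do_stmw helpers
 * live in the memory-access helper code and, for lmw, load GPRs rD..r31
 * from consecutive words starting at the effective address in T0,
 * roughly:
 *
 *     void glue(do_lmw, MEMSUFFIX) (int dst)
 *     {
 *         for (; dst < 32; dst++, T0 += 4) {
 *             env->gpr[dst] = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
 *         }
 *     }
 *
 * PARAM1 is the index of the first register transferred.
 */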
/*** Integer load and store strings ***/
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/* PPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * On the other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
 */
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif
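/*
 * Reading of the check above (interpretation added for clarity, not from
 * the original comments): PARAM1 is the first destination register,
 * PARAM2 and PARAM3 are the rA and rB indices, and T1 holds the XER byte
 * count. The condition flags, conservatively (T1 counts bytes, not
 * registers), the case where rA or rB falls inside the register range
 * the string load is about to overwrite, e.g. lswx r5,r6,r7 with a byte
 * count large enough to reach r6 or r7.
 */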
void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
/*** Floating-point store ***/
#define PPC_STF_OP(name, op)                                                 \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                       \
{                                                                            \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                  \
    RETURN();                                                                \
}

#if defined(TARGET_PPC64)
#define PPC_STF_OP_64(name, op)                                              \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)            \
{                                                                            \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                  \
    RETURN();                                                                \
}
#endif

static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, float64 d)
{
    glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
}

static always_inline void glue(stfiw, MEMSUFFIX) (target_ulong EA, float64 d)
{
    CPU_DoubleU u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    glue(st32, MEMSUFFIX)(EA, u.l.lower);
}

PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfs);
PPC_STF_OP(fiw, stfiw);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfs);
PPC_STF_OP_64(fiw, stfiw);
#endif
static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, float64 d)
{
    CPU_DoubleU u;

    u.d = d;
    u.ll = bswap64(u.ll);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}

static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, float64 d)
{
    CPU_FloatU u;

    u.f = float64_to_float32(d, &env->fp_status);
    u.l = bswap32(u.l);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}

static always_inline void glue(stfiwr, MEMSUFFIX) (target_ulong EA, float64 d)
{
    CPU_DoubleU u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    u.l.lower = bswap32(u.l.lower);
    glue(st32, MEMSUFFIX)(EA, u.l.lower);
}

PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stfsr);
PPC_STF_OP(fiw_le, stfiwr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stfsr);
PPC_STF_OP_64(fiw_le, stfiwr);
#endif
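/*
 * Note (added for clarity): the _le helpers byte-swap through the
 * CPU_FloatU/CPU_DoubleU unions so that the swap is applied to the raw
 * bit pattern rather than to a float value, e.g. for a single-precision
 * store:
 *
 *     CPU_FloatU u;
 *     u.f = value;            // reinterpret the float32 as a uint32_t
 *     u.l = bswap32(u.l);     // swap the raw bytes
 *     stfl(EA, u.f);          // store the swapped pattern
 *
 * Going through the union avoids the integer<->float value conversion
 * that a plain cast would perform.
 */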
/*** Floating-point load ***/
#define PPC_LDF_OP(name, op)                                                 \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                        \
{                                                                            \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                 \
    RETURN();                                                                \
}

#if defined(TARGET_PPC64)
#define PPC_LDF_OP_64(name, op)                                              \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)             \
{                                                                            \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                 \
    RETURN();                                                                \
}
#endif

static always_inline float64 glue(ldfs, MEMSUFFIX) (target_ulong EA)
{
    return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
}

PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfs);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfs);
#endif

static always_inline float64 glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    CPU_DoubleU u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.ll = bswap64(u.ll);

    return u.d;
}

static always_inline float64 glue(ldfsr, MEMSUFFIX) (target_ulong EA)
{
    CPU_FloatU u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.l = bswap32(u.l);

    return float32_to_float64(u.f, &env->fp_status);
}

PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldfsr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldfsr);
#endif
/* Load and set reservation */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
/* Store with reservation */
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}
#endif

void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}
#endif
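/*
 * Usage note (added): together these ops implement the lwarx/stwcx.
 * reservation pair. lwarx records the effective address in env->reserve;
 * stwcx. succeeds (CR0[EQ] set via the | 0x02) only if the reservation
 * still matches that address, and always clears the reservation. A guest
 * atomic increment therefore loops, roughly:
 *
 *     retry:
 *         lwarx   r4, 0, r3      # load word and set reservation
 *         addi    r4, r4, 1
 *         stwcx.  r4, 0, r3      # store iff reservation still held
 *         bne-    retry          # CR0[EQ] clear -> reservation lost
 *
 * Since this emulation keys the reservation on the exact effective
 * address only, it is a simplification of real hardware's
 * reservation-granule behavior.
 */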
void OPPROTO glue(op_dcbz_l32, MEMSUFFIX) (void)
{
    T0 &= ~((uint32_t)31);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l64, MEMSUFFIX) (void)
{
    T0 &= ~((uint32_t)63);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l128, MEMSUFFIX) (void)
{
    T0 &= ~((uint32_t)127);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x40UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x44UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x48UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x4CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x50UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x54UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x58UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x5CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x60UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x64UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x68UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x6CUL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x70UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x74UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x78UL), 0);
    glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x7CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(do_dcbz, MEMSUFFIX)();
    RETURN();
}
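/*
 * Equivalent loop form (illustration only; the unrolled stores above are
 * what the file actually uses, presumably to keep the micro-op simple
 * and branch-free):
 *
 *     T0 &= ~((uint32_t)31);
 *     for (int i = 0; i < 32; i += 4) {
 *         glue(st32, MEMSUFFIX)((uint32_t)(T0 + i), 0);
 *     }
 *
 * The _l32/_l64/_l128 variants differ only in the alignment mask and the
 * number of zeroed words, matching 32-, 64- and 128-byte cache lines;
 * op_dcbz defers to a do_dcbz helper that presumably applies the line
 * size configured for the emulated CPU.
 */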
#if defined(TARGET_PPC64)
void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)31);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)63);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
{
    T0 &= ~((uint64_t)127);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x40UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x44UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x48UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x4CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x50UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x54UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x58UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x5CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x60UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x64UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x68UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x6CUL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x70UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x74UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x78UL), 0);
    glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x7CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(do_dcbz_64, MEMSUFFIX)();
    RETURN();
}
#endif
/* Instruction cache block invalidate */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif
/* External access */
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(st32, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(st32, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif
/* XXX: these micro-ops need tests! */
/* PowerPC 601 specific instructions (POWER bridge) */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}

/* POWER2 quad load and store */
/* XXX: TAGs are not managed */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}
/* Altivec vector extension */
#if defined(WORDS_BIGENDIAN)
#define VR_DWORD0 0
#define VR_DWORD1 1
#else
#define VR_DWORD0 1
#define VR_DWORD1 0
#endif
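/*
 * Note (added): VR_DWORD0/VR_DWORD1 select which u64 element of the AVR
 * union holds the most-significant half of the 128-bit vector register,
 * so the lvx/stvx ops below work on both big- and little-endian hosts:
 * on a big-endian host u64[0] is the high half, on a little-endian host
 * the indices are swapped. The _le ops additionally reverse both the
 * dword order and the bytes within each dword (ldu64r/st64r) to model
 * little-endian guest accesses.
 */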
void OPPROTO glue(op_vr_lvx, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD0] = glue(ldu64, MEMSUFFIX)((uint32_t)T0);
    AVR0.u64[VR_DWORD1] = glue(ldu64, MEMSUFFIX)((uint32_t)T0 + 8);
}

void OPPROTO glue(op_vr_lvx_le, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD1] = glue(ldu64r, MEMSUFFIX)((uint32_t)T0);
    AVR0.u64[VR_DWORD0] = glue(ldu64r, MEMSUFFIX)((uint32_t)T0 + 8);
}

void OPPROTO glue(op_vr_stvx, MEMSUFFIX) (void)
{
    glue(st64, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD0]);
    glue(st64, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD1]);
}

void OPPROTO glue(op_vr_stvx_le, MEMSUFFIX) (void)
{
    glue(st64r, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD1]);
    glue(st64r, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD0]);
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_vr_lvx_64, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD0] = glue(ldu64, MEMSUFFIX)((uint64_t)T0);
    AVR0.u64[VR_DWORD1] = glue(ldu64, MEMSUFFIX)((uint64_t)T0 + 8);
}

void OPPROTO glue(op_vr_lvx_le_64, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD1] = glue(ldu64r, MEMSUFFIX)((uint64_t)T0);
    AVR0.u64[VR_DWORD0] = glue(ldu64r, MEMSUFFIX)((uint64_t)T0 + 8);
}

void OPPROTO glue(op_vr_stvx_64, MEMSUFFIX) (void)
{
    glue(st64, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD0]);
    glue(st64, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD1]);
}

void OPPROTO glue(op_vr_stvx_le_64, MEMSUFFIX) (void)
{
    glue(st64r, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD1]);
    glue(st64r, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD0]);
}
#endif
#undef VR_DWORD0
#undef VR_DWORD1
/* SPE extension */
#define _PPC_SPE_LD_OP(name, op)                                             \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                    \
{                                                                            \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                               \
    RETURN();                                                                \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_LD_OP_64(name, op)                                          \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)         \
{                                                                            \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                               \
    RETURN();                                                                \
}
#define PPC_SPE_LD_OP(name, op)                                              \
_PPC_SPE_LD_OP(name, op);                                                    \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                              \
_PPC_SPE_LD_OP(name, op)
#endif

#define _PPC_SPE_ST_OP(name, op)                                             \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                   \
{                                                                            \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                \
    RETURN();                                                                \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                          \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)        \
{                                                                            \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                \
    RETURN();                                                                \
}
#define PPC_SPE_ST_OP(name, op)                                              \
_PPC_SPE_ST_OP(name, op);                                                    \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                              \
_PPC_SPE_ST_OP(name, op)
#endif
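/*
 * Illustration (not in the original file): PPC_SPE_LD_OP(dd, ldu64)
 * below expands to op_spe_ldd<MEMSUFFIX>, and on TARGET_PPC64 also to
 * op_spe_ldd_64<MEMSUFFIX>. The loaded value goes through T1_64 because
 * SPE treats the GPRs as 64-bit even on 32-bit implementations.
 */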
#if !defined(TARGET_PPC64)
PPC_SPE_LD_OP(dd, ldu64);
PPC_SPE_ST_OP(dd, st64);
PPC_SPE_LD_OP(dd_le, ldu64r);
PPC_SPE_ST_OP(dd_le, st64r);
#endif
static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu32, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldu32, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw, spe_ldw);
static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA,
                                                     uint64_t data)
{
    glue(st32, MEMSUFFIX)(EA, data >> 32);
    glue(st32, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw, spe_stdw);
static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu32r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldu32r, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw_le, spe_ldw_le);
static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
                                                        uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data >> 32);
    glue(st32r, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
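/*
 * Layout note (added): spe_ldw assembles the 64-bit SPE value from two
 * big-endian words, so for memory bytes 00 11 22 33 44 55 66 77 at EA,
 * T1_64 ends up as 0x0011223344556677; spe_stdw is the exact inverse.
 * The _le variants byte-reverse each 32-bit word individually.
 */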
static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh, spe_ldh);
static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA,
                                                     uint64_t data)
{
    glue(st16, MEMSUFFIX)(EA, data >> 48);
    glue(st16, MEMSUFFIX)(EA + 2, data >> 32);
    glue(st16, MEMSUFFIX)(EA + 4, data >> 16);
    glue(st16, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh, spe_stdh);
static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh_le, spe_ldh_le);
static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
                                                        uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
    glue(st16r, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe, spe_lwhe);
static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA,
                                                      uint64_t data)
{
    glue(st16, MEMSUFFIX)(EA, data >> 48);
    glue(st16, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe, spe_stwhe);
static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
                                                         uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou, spe_lwhou);
static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(lds16, MEMSUFFIX)(EA))) << 32;
    ret |= (uint64_t)((int32_t)glue(lds16, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos, spe_lwhos);
static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA,
                                                      uint64_t data)
{
    glue(st16, MEMSUFFIX)(EA, data >> 32);
    glue(st16, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who, spe_stwho);
static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(lds16r, MEMSUFFIX)(EA))) << 32;
    ret |= (uint64_t)((int32_t)glue(lds16r, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
                                                         uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who_le, spe_stwho_le);
#if !defined(TARGET_PPC64)
static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA,
                                                      uint64_t data)
{
    glue(st32, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo, spe_stwwo);
static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
                                                         uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
#endif
static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(ldu16, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h, spe_lh);
static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(ldu16r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h_le, spe_lh_le);
static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ldu32, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
static always_inline
uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ldu32r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(ldu16, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(ldu16, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
static always_inline
uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(ldu16r, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(ldu16r, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);

#undef MEMSUFFIX