/* target-ppc/op_mem.h (from qemu/mini2440.git) */

/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

static always_inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
}

static always_inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    int16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    /* Cast the whole swapped value so the result is sign-extended */
    return (int16_t)(((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8));
}

static always_inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
}

static always_inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}

#if defined(TARGET_PPC64)
static always_inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}

static always_inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    /* Cast the whole swapped value so the result is sign-extended */
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
                     ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
#endif

static always_inline void glue(st16r, MEMSUFFIX) (target_ulong EA,
                                                  uint16_t data)
{
    uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
    glue(stw, MEMSUFFIX)(EA, tmp);
}

static always_inline void glue(st32r, MEMSUFFIX) (target_ulong EA,
                                                  uint32_t data)
{
    uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
        ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
    glue(stl, MEMSUFFIX)(EA, tmp);
}

static always_inline void glue(st64r, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
        ((data & 0x00FF000000000000ULL) >> 40) |
        ((data & 0x0000FF0000000000ULL) >> 24) |
        ((data & 0x000000FF00000000ULL) >> 8) |
        ((data & 0x00000000FF000000ULL) << 8) |
        ((data & 0x0000000000FF0000ULL) << 24) |
        ((data & 0x000000000000FF00ULL) << 40) |
        ((data & 0x00000000000000FFULL) << 56);
    glue(stq, MEMSUFFIX)(EA, tmp);
}
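
/* The helpers above implement byte reversal by plain mask-and-shift.
 * A worked example (standalone sketch, not compiled as part of this
 * header; bswap32_demo is an illustrative name only):
 *
 *   #include <stdint.h>
 *
 *   static uint32_t bswap32_demo (uint32_t x)
 *   {
 *       return ((x & 0xFF000000) >> 24) | ((x & 0x00FF0000) >> 8) |
 *              ((x & 0x0000FF00) << 8) | ((x & 0x000000FF) << 24);
 *   }
 *
 *   bswap32_demo(0x11223344) == 0x44332211
 */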

/*** Integer load ***/
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif

#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif
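
/* For illustration: assuming this header is included with MEMSUFFIX
 * defined as _raw (one of the suffixes used elsewhere in the PowerPC
 * target), PPC_LD_OP(bz, ldub) below expands to roughly:
 *
 *   void OPPROTO op_lbz_raw (void)
 *   {
 *       T1 = ldub_raw((uint32_t)T0);
 *       RETURN();
 *   }
 *
 * so each instantiation line generates one micro-op per memory suffix.
 */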

PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif

/*** Integer store ***/
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif

/*** Integer load and store with byte reverse ***/
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif
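
/* In little-endian mode the byte-reversed ops map back to the
 * natural-order accessors (hbr_le uses lduw, wbr_le uses ldl):
 * reversing an already byte-swapped access cancels out.
 */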

/*** Integer load and store multiple ***/
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/*** Integer load and store strings ***/
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
{
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/* The PPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * On the other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
 */
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
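
/* Worked example of the range check above, with illustrative values:
 * for lswx with start register rD = 5 (PARAM1) and byte count T1 = 12
 * (so r5..r7 are written), rA = 6 (PARAM2) trips 5 < 6 && 5 + 12 > 6
 * and the op raises the program exception instead of overwriting the
 * base register.
 */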

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
{
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/*** Floating-point store ***/
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif

static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
{
    glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
}

#if defined(WORDS_BIGENDIAN)
#define WORD0 0
#define WORD1 1
#else
#define WORD0 1
#define WORD1 0
#endif
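
/* WORD0/WORD1 express the PowerPC word numbering (word 0 = the most
 * significant word) independently of host byte order: on a
 * little-endian host the high word of a double sits at index 1, hence
 * the swapped definitions above.
 */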
static always_inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint32_t u[2];
    } u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    glue(stl, MEMSUFFIX)(EA, u.u[WORD1]);
}
#undef WORD0
#undef WORD1

PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfs);
PPC_STF_OP(fiwx, stfiwx);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfs);
PPC_STF_OP_64(fiwx, stfiwx);
#endif

static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
          ((u.u & 0x00FF000000000000ULL) >> 40) |
          ((u.u & 0x0000FF0000000000ULL) >> 24) |
          ((u.u & 0x000000FF00000000ULL) >> 8) |
          ((u.u & 0x00000000FF000000ULL) << 8) |
          ((u.u & 0x0000000000FF0000ULL) << 24) |
          ((u.u & 0x000000000000FF00ULL) << 40) |
          ((u.u & 0x00000000000000FFULL) << 56);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}

static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = float64_to_float32(d, &env->fp_status);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
          ((u.u & 0x00FF0000UL) >> 8) |
          ((u.u & 0x0000FF00UL) << 8) |
          ((u.u & 0x000000FFUL) << 24);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}

static always_inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    u.u = ((u.u & 0xFF000000UL) >> 24) |
          ((u.u & 0x00FF0000UL) >> 8) |
          ((u.u & 0x0000FF00UL) << 8) |
          ((u.u & 0x000000FFUL) << 24);
    glue(stl, MEMSUFFIX)(EA, u.u);
}

PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stfsr);
PPC_STF_OP(fiwx_le, stfiwxr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stfsr);
PPC_STF_OP_64(fiwx_le, stfiwxr);
#endif

/*** Floating-point load ***/
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
#endif

static always_inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
{
    return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
}

PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfs);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfs);
#endif

static always_inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
          ((u.u & 0x00FF000000000000ULL) >> 40) |
          ((u.u & 0x0000FF0000000000ULL) >> 24) |
          ((u.u & 0x000000FF00000000ULL) >> 8) |
          ((u.u & 0x00000000FF000000ULL) << 8) |
          ((u.u & 0x0000000000FF0000ULL) << 24) |
          ((u.u & 0x000000000000FF00ULL) << 40) |
          ((u.u & 0x00000000000000FFULL) << 56);

    return u.d;
}

static always_inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
          ((u.u & 0x00FF0000UL) >> 8) |
          ((u.u & 0x0000FF00UL) << 8) |
          ((u.u & 0x000000FFUL) << 24);

    return float32_to_float64(u.f, &env->fp_status);
}

PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldfsr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldfsr);
#endif

/* Load and set reservation */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif

/* Store with reservation */
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}
#endif

void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}
#endif
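
/* The ops above implement the guest's load-reserve/store-conditional
 * pairing: CR0 is set to xer_so | 0x02 (EQ) when the store succeeds and
 * to xer_so alone when the reservation was lost.  Typical guest usage
 * (sketch of an atomic add):
 *
 *   loop: lwarx  r4,0,r3      # load word, reserve r3's address
 *         add    r4,r4,r5
 *         stwcx. r4,0,r3      # store only if still reserved
 *         bne-   loop         # CR0[EQ] clear -> retry
 */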

void OPPROTO glue(op_dcbz_l32, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l128, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x40UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x44UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x48UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x4CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x50UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x54UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x58UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x5CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x60UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x64UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x68UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x6CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x70UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x74UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x78UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x7CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(do_dcbz, MEMSUFFIX)();
    RETURN();
}
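
/* Each op_dcbz_lNN variant clears one NN-byte cache line with 32-bit
 * stores; op_dcbz defers to the do_dcbz helper, which is assumed to
 * apply the cache-line size configured for the emulated CPU.
 */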

#if defined(TARGET_PPC64)
void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x40UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x44UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x48UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x4CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x50UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x54UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x58UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x5CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x60UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x64UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x68UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x6CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x70UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x74UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x78UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x7CUL), 0);
    RETURN();
}

void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(do_dcbz_64, MEMSUFFIX)();
    RETURN();
}
#endif

/* Instruction cache block invalidate */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif

/* External access */
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif
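
/* eciwx/ecowx (External Control In/Out Word Indexed) are modelled here
 * as plain word accesses, byte-reversed in the _le variants; any
 * EAR-based permission checking is assumed to happen before these
 * micro-ops run.
 */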

/* XXX: these micro-ops need tests! */
/* PowerPC 601 specific instructions (POWER bridge) */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}

/* POWER2 quad load and store */
/* XXX: TAGs are not managed */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}

/* Altivec vector extension */
#if defined(WORDS_BIGENDIAN)
#define VR_DWORD0 0
#define VR_DWORD1 1
#else
#define VR_DWORD0 1
#define VR_DWORD1 0
#endif
void OPPROTO glue(op_vr_lvx, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint32_t)T0);
    AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint32_t)T0 + 8);
}

void OPPROTO glue(op_vr_lvx_le, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint32_t)T0);
    AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint32_t)T0 + 8);
}

void OPPROTO glue(op_vr_stvx, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD0]);
    glue(stq, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD1]);
}

void OPPROTO glue(op_vr_stvx_le, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD1]);
    glue(stq, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD0]);
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_vr_lvx_64, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint64_t)T0);
    AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint64_t)T0 + 8);
}

void OPPROTO glue(op_vr_lvx_le_64, MEMSUFFIX) (void)
{
    AVR0.u64[VR_DWORD1] = glue(ldq, MEMSUFFIX)((uint64_t)T0);
    AVR0.u64[VR_DWORD0] = glue(ldq, MEMSUFFIX)((uint64_t)T0 + 8);
}

void OPPROTO glue(op_vr_stvx_64, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD0]);
    glue(stq, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD1]);
}

void OPPROTO glue(op_vr_stvx_le_64, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD1]);
    glue(stq, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD0]);
}
#endif
#undef VR_DWORD0
#undef VR_DWORD1
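
/* As with WORD0/WORD1 above, VR_DWORD0/VR_DWORD1 map the architectural
 * doubleword order of a 128-bit vector register onto the host layout of
 * AVR0.u64[]; the _le ops simply access the two doublewords swapped.
 */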

/* SPE extension */
#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RETURN();                                                                 \
}
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif

#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif

#if !defined(TARGET_PPC64)
PPC_SPE_LD_OP(dd, ldq);
PPC_SPE_ST_OP(dd, stq);
PPC_SPE_LD_OP(dd_le, ld64r);
PPC_SPE_ST_OP(dd_le, st64r);
#endif
static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw, spe_ldw);
static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA,
                                                     uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data >> 32);
    glue(stl, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw, spe_stdw);
static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw_le, spe_ldw_le);
static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
                                                        uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data >> 32);
    glue(st32r, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh, spe_ldh);
static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA,
                                                     uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 4, data >> 16);
    glue(stw, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh, spe_stdh);
static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh_le, spe_ldh_le);
static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
                                                        uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
    glue(st16r, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe, spe_lwhe);
static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA,
                                                      uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe, spe_stwhe);
static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
                                                         uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou, spe_lwhou);
static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
    /* Mask to 32 bits so the sign extension cannot leak into the
     * high word already assembled above */
    ret |= (uint64_t)((uint32_t)glue(ldsw, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos, spe_lwhos);
static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA,
                                                      uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who, spe_stwho);
static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
    /* Same 32-bit masking as in spe_lwhos above */
    ret |= (uint64_t)((uint32_t)glue(ld16rs, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
                                                         uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who_le, spe_stwho_le);
#if !defined(TARGET_PPC64)
static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA,
                                                      uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo, spe_stwwo);
static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
                                                         uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
#endif
static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h, spe_lh);
static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(ld16r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h_le, spe_lh_le);
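
/* Worked example of the halfword splat above: a loaded halfword of
 * 0xABCD yields 0xABCD0000ABCD0000ULL, i.e. the value lands in the
 * even (most significant) halfword of each 32-bit element.
 */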
static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
static always_inline
uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ld32r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(lduw, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(lduw, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
static always_inline
uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(ld16r, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(ld16r, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);

#undef MEMSUFFIX