Preliminary AIX support
[qemu/mini2440.git] / target-ppc / op_mem.h
blobb504555cfd94166fccb5a4fb8aa5b3f8db580c55
1 /*
2 * PowerPC emulation micro-operations for qemu.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "op_mem_access.h"
23 /*** Integer load and store multiple ***/
/* lmw/stmw micro-ops: load/store multiple words starting at GPR PARAM1.
 * Variants: _le = little-endian byte order, _64 = 64-bit effective address
 * (TARGET_PPC64 only).  All work is delegated to the do_lmw*/do_stmw*
 * helpers for the current MEMSUFFIX access mode.
 * NOTE(review): brace-only lines are missing from this extract.
 */
24 void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
26 glue(do_lmw, MEMSUFFIX)(PARAM1);
27 RETURN();
30 #if defined(TARGET_PPC64)
31 void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
33 glue(do_lmw_64, MEMSUFFIX)(PARAM1);
34 RETURN();
36 #endif
38 void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
40 glue(do_lmw_le, MEMSUFFIX)(PARAM1);
41 RETURN();
44 #if defined(TARGET_PPC64)
45 void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
47 glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
48 RETURN();
50 #endif
52 void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
54 glue(do_stmw, MEMSUFFIX)(PARAM1);
55 RETURN();
58 #if defined(TARGET_PPC64)
59 void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
61 glue(do_stmw_64, MEMSUFFIX)(PARAM1);
62 RETURN();
64 #endif
66 void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
68 glue(do_stmw_le, MEMSUFFIX)(PARAM1);
69 RETURN();
72 #if defined(TARGET_PPC64)
73 void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
75 glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
76 RETURN();
78 #endif
80 /*** Integer load and store strings ***/
/* lswi: load string word immediate.  PARAM1 is the first destination
 * register; the byte count comes from the instruction (already folded
 * into the do_lsw helper's state).
 */
81 void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
83 glue(do_lsw, MEMSUFFIX)(PARAM1);
84 RETURN();
87 #if defined(TARGET_PPC64)
88 void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
90 glue(do_lsw_64, MEMSUFFIX)(PARAM1);
91 RETURN();
93 #endif
95 /* PPC32 specification says we must generate an exception if
96 * rA is in the range of registers to be loaded.
97 * In an other hand, IBM says this is valid, but rA won't be loaded.
98 * For now, I'll follow the spec...
/* lswx: load string word indexed.  T1 holds XER[BC] (the byte count);
 * PARAM1 = first destination GPR, PARAM2/PARAM3 = rA/rB.  The range
 * check raises a program-invalid exception when the register window
 * [PARAM1, PARAM1+T1) would overwrite rA or rB, per the spec note above.
 */
100 void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
102 /* Note: T1 comes from xer_bc then no cast is needed */
103 if (likely(T1 != 0)) {
104 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
105 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
106 do_raise_exception_err(POWERPC_EXCP_PROGRAM,
107 POWERPC_EXCP_INVAL |
108 POWERPC_EXCP_INVAL_LSWX);
109 } else {
110 glue(do_lsw, MEMSUFFIX)(PARAM1);
113 RETURN();
116 #if defined(TARGET_PPC64)
/* 64-bit effective-address variant of lswx; identical range check. */
117 void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
119 /* Note: T1 comes from xer_bc then no cast is needed */
120 if (likely(T1 != 0)) {
121 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
122 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
123 do_raise_exception_err(POWERPC_EXCP_PROGRAM,
124 POWERPC_EXCP_INVAL |
125 POWERPC_EXCP_INVAL_LSWX);
126 } else {
127 glue(do_lsw_64, MEMSUFFIX)(PARAM1);
130 RETURN();
132 #endif
/* stsw: store string word (both immediate and indexed forms end up
 * here); PARAM1 is the first source register.
 */
134 void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
136 glue(do_stsw, MEMSUFFIX)(PARAM1);
137 RETURN();
140 #if defined(TARGET_PPC64)
141 void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
143 glue(do_stsw_64, MEMSUFFIX)(PARAM1);
144 RETURN();
146 #endif
148 /*** Floating-point store ***/
/* PPC_STF_OP(name, op) instantiates a micro-op op_st<name> that stores
 * FT0 at effective address T0 using accessor op.  The _64 variant keeps
 * the full 64-bit EA instead of truncating to 32 bits.
 */
149 #define PPC_STF_OP(name, op) \
150 void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void) \
152 glue(op, MEMSUFFIX)((uint32_t)T0, FT0); \
153 RETURN(); \
156 #if defined(TARGET_PPC64)
157 #define PPC_STF_OP_64(name, op) \
158 void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void) \
160 glue(op, MEMSUFFIX)((uint64_t)T0, FT0); \
161 RETURN(); \
163 #endif
/* stfs: convert the double to single precision, then store 32 bits. */
165 static always_inline void glue(stfs, MEMSUFFIX) (target_ulong EA, float64 d)
167 glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
/* stfiw: store the raw low 32 bits of the FP register (stfiwx). */
170 static always_inline void glue(stfiw, MEMSUFFIX) (target_ulong EA, float64 d)
172 CPU_DoubleU u;
174 /* Store the low order 32 bits without any conversion */
175 u.d = d;
176 glue(st32, MEMSUFFIX)(EA, u.l.lower);
179 PPC_STF_OP(fd, stfq);
180 PPC_STF_OP(fs, stfs);
181 PPC_STF_OP(fiw, stfiw);
182 #if defined(TARGET_PPC64)
183 PPC_STF_OP_64(fd, stfq);
184 PPC_STF_OP_64(fs, stfs);
185 PPC_STF_OP_64(fiw, stfiw);
186 #endif
/* Byte-reversed (little-endian) store helpers for the *_le micro-ops. */
188 static always_inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, float64 d)
190 CPU_DoubleU u;
192 u.d = d;
193 u.ll = bswap64(u.ll);
194 glue(stfq, MEMSUFFIX)(EA, u.d);
197 static always_inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, float64 d)
199 CPU_FloatU u;
201 u.f = float64_to_float32(d, &env->fp_status);
202 u.l = bswap32(u.l);
203 glue(stfl, MEMSUFFIX)(EA, u.f);
206 static always_inline void glue(stfiwr, MEMSUFFIX) (target_ulong EA, float64 d)
208 CPU_DoubleU u;
210 /* Store the low order 32 bits without any conversion */
211 u.d = d;
212 u.l.lower = bswap32(u.l.lower);
213 glue(st32, MEMSUFFIX)(EA, u.l.lower);
216 PPC_STF_OP(fd_le, stfqr);
217 PPC_STF_OP(fs_le, stfsr);
218 PPC_STF_OP(fiw_le, stfiwr);
219 #if defined(TARGET_PPC64)
220 PPC_STF_OP_64(fd_le, stfqr);
221 PPC_STF_OP_64(fs_le, stfsr);
222 PPC_STF_OP_64(fiw_le, stfiwr);
223 #endif
225 /*** Floating-point load ***/
/* PPC_LDF_OP(name, op) instantiates a micro-op op_l<name> loading FT0
 * from effective address T0 via accessor op; _64 keeps the 64-bit EA.
 */
226 #define PPC_LDF_OP(name, op) \
227 void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void) \
229 FT0 = glue(op, MEMSUFFIX)((uint32_t)T0); \
230 RETURN(); \
233 #if defined(TARGET_PPC64)
234 #define PPC_LDF_OP_64(name, op) \
235 void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \
237 FT0 = glue(op, MEMSUFFIX)((uint64_t)T0); \
238 RETURN(); \
240 #endif
/* ldfs: load a single-precision value and widen it to double. */
242 static always_inline float64 glue(ldfs, MEMSUFFIX) (target_ulong EA)
244 return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
247 PPC_LDF_OP(fd, ldfq);
248 PPC_LDF_OP(fs, ldfs);
249 #if defined(TARGET_PPC64)
250 PPC_LDF_OP_64(fd, ldfq);
251 PPC_LDF_OP_64(fs, ldfs);
252 #endif
/* Byte-reversed (little-endian) load helpers for the *_le micro-ops. */
254 static always_inline float64 glue(ldfqr, MEMSUFFIX) (target_ulong EA)
256 CPU_DoubleU u;
258 u.d = glue(ldfq, MEMSUFFIX)(EA);
259 u.ll = bswap64(u.ll);
261 return u.d;
264 static always_inline float64 glue(ldfsr, MEMSUFFIX) (target_ulong EA)
266 CPU_FloatU u;
268 u.f = glue(ldfl, MEMSUFFIX)(EA);
269 u.l = bswap32(u.l);
271 return float32_to_float64(u.f, &env->fp_status);
274 PPC_LDF_OP(fd_le, ldfqr);
275 PPC_LDF_OP(fs_le, ldfsr);
276 #if defined(TARGET_PPC64)
277 PPC_LDF_OP_64(fd_le, ldfqr);
278 PPC_LDF_OP_64(fs_le, ldfsr);
279 #endif
281 /* Load and set reservation */
/* lwarx: load word and set the reservation address.  The EA must be
 * word (4-byte) aligned, otherwise an alignment exception is raised.
 */
282 void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
284 if (unlikely(T0 & 0x03)) {
285 do_raise_exception(POWERPC_EXCP_ALIGN);
286 } else {
287 T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
288 env->reserve = (uint32_t)T0;
290 RETURN();
#if defined(TARGET_PPC64)
/* lwarx in 64-bit address mode: load word and set the reservation.
 * EA must be word (4-byte) aligned.
 */
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

/* ldarx (32-bit address mode): load doubleword and set the reservation.
 * The ISA requires the EA to be doubleword (8-byte) aligned, so test
 * the low three bits (0x07); the previous 0x03 mask wrongly accepted
 * word-aligned but not doubleword-aligned addresses.
 */
void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

/* ldarx in 64-bit address mode: same doubleword alignment rule. */
void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
/* lwarx, little-endian variant: byte-reversed word load, same 4-byte
 * alignment requirement and reservation bookkeeping.
 */
328 void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
330 if (unlikely(T0 & 0x03)) {
331 do_raise_exception(POWERPC_EXCP_ALIGN);
332 } else {
333 T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);
334 env->reserve = (uint32_t)T0;
336 RETURN();
#if defined(TARGET_PPC64)
/* lwarx, little-endian, 64-bit address mode: word alignment (4 bytes). */
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}

/* ldarx, little-endian: doubleword load-and-reserve.  EA must be
 * doubleword (8-byte) aligned per the ISA, hence the 0x07 mask (the
 * previous 0x03 mask only enforced word alignment).
 */
void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

/* ldarx, little-endian, 64-bit address mode: same 8-byte alignment. */
void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldu64r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
374 /* Store with reservation */
/* stwcx.: store word conditional.  Succeeds only if the reservation
 * still matches the EA; CR0 gets xer_so plus the EQ bit (0x02) on
 * success.  The reservation is always cleared afterwards.
 */
375 void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
377 if (unlikely(T0 & 0x03)) {
378 do_raise_exception(POWERPC_EXCP_ALIGN);
379 } else {
380 if (unlikely(env->reserve != (uint32_t)T0)) {
381 env->crf[0] = xer_so;
382 } else {
383 glue(st32, MEMSUFFIX)((uint32_t)T0, T1);
384 env->crf[0] = xer_so | 0x02;
387 env->reserve = (target_ulong)-1ULL;
388 RETURN();
#if defined(TARGET_PPC64)
/* stwcx. in 64-bit address mode: word (4-byte) alignment required. */
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    /* The reservation is cleared whether or not the store succeeded. */
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

/* stdcx. (32-bit address mode): store doubleword conditional.  The ISA
 * requires doubleword (8-byte) alignment, hence the 0x07 mask (the
 * previous 0x03 mask only enforced word alignment).
 */
void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

/* stdcx. in 64-bit address mode: same doubleword alignment rule. */
void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}
#endif
/* stwcx., little-endian variant: byte-reversed conditional word store;
 * same CR0/reservation semantics as op_stwcx.
 */
441 void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
443 if (unlikely(T0 & 0x03)) {
444 do_raise_exception(POWERPC_EXCP_ALIGN);
445 } else {
446 if (unlikely(env->reserve != (uint32_t)T0)) {
447 env->crf[0] = xer_so;
448 } else {
449 glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
450 env->crf[0] = xer_so | 0x02;
453 env->reserve = (target_ulong)-1ULL;
454 RETURN();
#if defined(TARGET_PPC64)
/* stwcx., little-endian, 64-bit address mode: word alignment. */
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    /* The reservation is cleared whether or not the store succeeded. */
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

/* stdcx., little-endian: doubleword conditional store.  The ISA
 * requires doubleword (8-byte) alignment, hence the 0x07 mask (the
 * previous 0x03 mask only enforced word alignment).
 */
void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}

/* stdcx., little-endian, 64-bit address mode: same 8-byte alignment. */
void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
    }
    env->reserve = (target_ulong)-1ULL;
    RETURN();
}
#endif
/* dcbz with a 32-bit cache line: align T0 down to the line size and
 * zero the whole line.  The stores are deliberately unrolled (dyngen
 * micro-ops avoid loops).  Variants below handle 64- and 128-byte
 * lines; op_dcbz defers to a helper that knows the CPU's line size.
 */
507 void OPPROTO glue(op_dcbz_l32, MEMSUFFIX) (void)
509 T0 &= ~((uint32_t)31);
510 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
511 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
512 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
513 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
514 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
515 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
516 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
517 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
518 RETURN();
/* 64-byte cache line. */
521 void OPPROTO glue(op_dcbz_l64, MEMSUFFIX) (void)
523 T0 &= ~((uint32_t)63);
524 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
525 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
526 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
527 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
528 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
529 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
530 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
531 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
532 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
533 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
534 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
535 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
536 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
537 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
538 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
539 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
540 RETURN();
/* 128-byte cache line. */
543 void OPPROTO glue(op_dcbz_l128, MEMSUFFIX) (void)
545 T0 &= ~((uint32_t)127);
546 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
547 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
548 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
549 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
550 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
551 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
552 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
553 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
554 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
555 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
556 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
557 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
558 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
559 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
560 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
561 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
562 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x40UL), 0);
563 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x44UL), 0);
564 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x48UL), 0);
565 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x4CUL), 0);
566 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x50UL), 0);
567 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x54UL), 0);
568 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x58UL), 0);
569 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x5CUL), 0);
570 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x60UL), 0);
571 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x64UL), 0);
572 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x68UL), 0);
573 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x6CUL), 0);
574 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x70UL), 0);
575 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x74UL), 0);
576 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x78UL), 0);
577 glue(st32, MEMSUFFIX)((uint32_t)(T0 + 0x7CUL), 0);
578 RETURN();
/* Generic dcbz: the helper picks the line size at run time. */
581 void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
583 glue(do_dcbz, MEMSUFFIX)();
584 RETURN();
587 #if defined(TARGET_PPC64)
/* 64-bit effective-address counterparts of the dcbz micro-ops above;
 * identical unrolled zeroing, but the EA is not truncated to 32 bits.
 */
588 void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
590 T0 &= ~((uint64_t)31);
591 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
592 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
593 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
594 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
595 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
596 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
597 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
598 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
599 RETURN();
602 void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
604 T0 &= ~((uint64_t)63);
605 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
606 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
607 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
608 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
609 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
610 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
611 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
612 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
613 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
614 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
615 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
616 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
617 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
618 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
619 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
620 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
621 RETURN();
624 void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
626 T0 &= ~((uint64_t)127);
627 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
628 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
629 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
630 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
631 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
632 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
633 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
634 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
635 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
636 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
637 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
638 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
639 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
640 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
641 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
642 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
643 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x40UL), 0);
644 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x44UL), 0);
645 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x48UL), 0);
646 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x4CUL), 0);
647 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x50UL), 0);
648 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x54UL), 0);
649 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x58UL), 0);
650 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x5CUL), 0);
651 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x60UL), 0);
652 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x64UL), 0);
653 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x68UL), 0);
654 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x6CUL), 0);
655 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x70UL), 0);
656 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x74UL), 0);
657 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x78UL), 0);
658 glue(st32, MEMSUFFIX)((uint64_t)(T0 + 0x7CUL), 0);
659 RETURN();
/* Generic 64-bit dcbz: helper selects the line size at run time. */
662 void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
664 glue(do_dcbz_64, MEMSUFFIX)();
665 RETURN();
667 #endif
669 /* Instruction cache block invalidate */
/* icbi: delegate to the helper, which invalidates any translated code
 * derived from the targeted cache block.
 */
670 void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
672 glue(do_icbi, MEMSUFFIX)();
673 RETURN();
676 #if defined(TARGET_PPC64)
677 void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
679 glue(do_icbi_64, MEMSUFFIX)();
680 RETURN();
682 #endif
684 /* External access */
/* eciwx/ecowx: external control in/out word indexed.  Modelled as a
 * plain 32-bit load/store of T1 at EA T0; _le variants byte-reverse,
 * _64 variants keep the full 64-bit EA.
 */
685 void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
687 T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
688 RETURN();
691 #if defined(TARGET_PPC64)
692 void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
694 T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);
695 RETURN();
697 #endif
699 void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
701 glue(st32, MEMSUFFIX)((uint32_t)T0, T1);
702 RETURN();
705 #if defined(TARGET_PPC64)
706 void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
708 glue(st32, MEMSUFFIX)((uint64_t)T0, T1);
709 RETURN();
711 #endif
713 void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
715 T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);
716 RETURN();
719 #if defined(TARGET_PPC64)
720 void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
722 T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);
723 RETURN();
725 #endif
727 void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
729 glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
730 RETURN();
733 #if defined(TARGET_PPC64)
734 void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
736 glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
737 RETURN();
739 #endif
741 /* XXX: those micro-ops need tests ! */
742 /* PowerPC 601 specific instructions (POWER bridge) */
/* lscbx: load string and compare byte indexed.  T1 is the byte count;
 * a zero count is a no-op per the POWER architecture.
 */
743 void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
745 /* When byte count is 0, do nothing */
746 if (likely(T1 != 0)) {
747 glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
749 RETURN();
752 /* POWER2 quad load and store */
753 /* XXX: TAGs are not managed */
754 void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
756 glue(do_POWER2_lfq, MEMSUFFIX)();
757 RETURN();
760 void glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
762 glue(do_POWER2_lfq_le, MEMSUFFIX)();
763 RETURN();
766 void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
768 glue(do_POWER2_stfq, MEMSUFFIX)();
769 RETURN();
772 void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
774 glue(do_POWER2_stfq_le, MEMSUFFIX)();
775 RETURN();
778 /* Altivec vector extension */
/* VR_DWORD0/1 select which half of the 128-bit AVR register maps to
 * the lower/higher memory address, depending on host endianness.
 */
779 #if defined(WORDS_BIGENDIAN)
780 #define VR_DWORD0 0
781 #define VR_DWORD1 1
782 #else
783 #define VR_DWORD0 1
784 #define VR_DWORD1 0
785 #endif
/* lvx/stvx: 128-bit vector load/store as two 64-bit halves.  The _le
 * variants byte-reverse each half and swap their order.
 */
786 void OPPROTO glue(op_vr_lvx, MEMSUFFIX) (void)
788 AVR0.u64[VR_DWORD0] = glue(ldu64, MEMSUFFIX)((uint32_t)T0);
789 AVR0.u64[VR_DWORD1] = glue(ldu64, MEMSUFFIX)((uint32_t)T0 + 8);
792 void OPPROTO glue(op_vr_lvx_le, MEMSUFFIX) (void)
794 AVR0.u64[VR_DWORD1] = glue(ldu64r, MEMSUFFIX)((uint32_t)T0);
795 AVR0.u64[VR_DWORD0] = glue(ldu64r, MEMSUFFIX)((uint32_t)T0 + 8);
798 void OPPROTO glue(op_vr_stvx, MEMSUFFIX) (void)
800 glue(st64, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD0]);
801 glue(st64, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD1]);
804 void OPPROTO glue(op_vr_stvx_le, MEMSUFFIX) (void)
806 glue(st64r, MEMSUFFIX)((uint32_t)T0, AVR0.u64[VR_DWORD1]);
807 glue(st64r, MEMSUFFIX)((uint32_t)T0 + 8, AVR0.u64[VR_DWORD0]);
810 #if defined(TARGET_PPC64)
811 void OPPROTO glue(op_vr_lvx_64, MEMSUFFIX) (void)
813 AVR0.u64[VR_DWORD0] = glue(ldu64, MEMSUFFIX)((uint64_t)T0);
814 AVR0.u64[VR_DWORD1] = glue(ldu64, MEMSUFFIX)((uint64_t)T0 + 8);
817 void OPPROTO glue(op_vr_lvx_le_64, MEMSUFFIX) (void)
819 AVR0.u64[VR_DWORD1] = glue(ldu64r, MEMSUFFIX)((uint64_t)T0);
820 AVR0.u64[VR_DWORD0] = glue(ldu64r, MEMSUFFIX)((uint64_t)T0 + 8);
823 void OPPROTO glue(op_vr_stvx_64, MEMSUFFIX) (void)
825 glue(st64, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD0]);
826 glue(st64, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD1]);
829 void OPPROTO glue(op_vr_stvx_le_64, MEMSUFFIX) (void)
831 glue(st64r, MEMSUFFIX)((uint64_t)T0, AVR0.u64[VR_DWORD1]);
832 glue(st64r, MEMSUFFIX)((uint64_t)T0 + 8, AVR0.u64[VR_DWORD0]);
834 #endif
835 #undef VR_DWORD0
836 #undef VR_DWORD1
838 /* SPE extension */
/* PPC_SPE_LD_OP/PPC_SPE_ST_OP generate micro-ops moving the 64-bit
 * SPE value T1_64 to/from EA T0 via accessor op.  On TARGET_PPC64 they
 * also emit a _64 variant that keeps the full 64-bit EA.
 */
839 #define _PPC_SPE_LD_OP(name, op) \
840 void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void) \
842 T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0); \
843 RETURN(); \
846 #if defined(TARGET_PPC64)
847 #define _PPC_SPE_LD_OP_64(name, op) \
848 void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void) \
850 T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0); \
851 RETURN(); \
853 #define PPC_SPE_LD_OP(name, op) \
854 _PPC_SPE_LD_OP(name, op); \
855 _PPC_SPE_LD_OP_64(name, op)
856 #else
857 #define PPC_SPE_LD_OP(name, op) \
858 _PPC_SPE_LD_OP(name, op)
859 #endif
861 #define _PPC_SPE_ST_OP(name, op) \
862 void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void) \
864 glue(op, MEMSUFFIX)((uint32_t)T0, T1_64); \
865 RETURN(); \
868 #if defined(TARGET_PPC64)
869 #define _PPC_SPE_ST_OP_64(name, op) \
870 void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void) \
872 glue(op, MEMSUFFIX)((uint64_t)T0, T1_64); \
873 RETURN(); \
875 #define PPC_SPE_ST_OP(name, op) \
876 _PPC_SPE_ST_OP(name, op); \
877 _PPC_SPE_ST_OP_64(name, op)
878 #else
879 #define PPC_SPE_ST_OP(name, op) \
880 _PPC_SPE_ST_OP(name, op)
881 #endif
/* SPE doubleword (dd), double-word-pair (dw) and four-halfword (dh)
 * accessors.  The *_le helpers byte-reverse each element but keep the
 * element order within the 64-bit SPE value.
 */
883 PPC_SPE_LD_OP(dd, ldu64);
884 PPC_SPE_ST_OP(dd, st64);
885 PPC_SPE_LD_OP(dd_le, ldu64r);
886 PPC_SPE_ST_OP(dd_le, st64r);
/* Two big-endian words packed high:low into a 64-bit value. */
887 static always_inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
889 uint64_t ret;
890 ret = (uint64_t)glue(ldu32, MEMSUFFIX)(EA) << 32;
891 ret |= (uint64_t)glue(ldu32, MEMSUFFIX)(EA + 4);
892 return ret;
894 PPC_SPE_LD_OP(dw, spe_ldw);
895 static always_inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA,
896 uint64_t data)
898 glue(st32, MEMSUFFIX)(EA, data >> 32);
899 glue(st32, MEMSUFFIX)(EA + 4, data);
901 PPC_SPE_ST_OP(dw, spe_stdw);
902 static always_inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
904 uint64_t ret;
905 ret = (uint64_t)glue(ldu32r, MEMSUFFIX)(EA) << 32;
906 ret |= (uint64_t)glue(ldu32r, MEMSUFFIX)(EA + 4);
907 return ret;
909 PPC_SPE_LD_OP(dw_le, spe_ldw_le);
910 static always_inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
911 uint64_t data)
913 glue(st32r, MEMSUFFIX)(EA, data >> 32);
914 glue(st32r, MEMSUFFIX)(EA + 4, data);
916 PPC_SPE_ST_OP(dw_le, spe_stdw_le);
/* Four halfwords packed into a 64-bit value, highest address lowest. */
917 static always_inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
919 uint64_t ret;
920 ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 48;
921 ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2) << 32;
922 ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 4) << 16;
923 ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 6);
924 return ret;
926 PPC_SPE_LD_OP(dh, spe_ldh);
927 static always_inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA,
928 uint64_t data)
930 glue(st16, MEMSUFFIX)(EA, data >> 48);
931 glue(st16, MEMSUFFIX)(EA + 2, data >> 32);
932 glue(st16, MEMSUFFIX)(EA + 4, data >> 16);
933 glue(st16, MEMSUFFIX)(EA + 6, data);
935 PPC_SPE_ST_OP(dh, spe_stdh);
936 static always_inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
938 uint64_t ret;
939 ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 48;
940 ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2) << 32;
941 ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 4) << 16;
942 ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 6);
943 return ret;
945 PPC_SPE_LD_OP(dh_le, spe_ldh_le);
946 static always_inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
947 uint64_t data)
949 glue(st16r, MEMSUFFIX)(EA, data >> 48);
950 glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
951 glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
952 glue(st16r, MEMSUFFIX)(EA + 6, data);
954 PPC_SPE_ST_OP(dh_le, spe_stdh_le);
/* whe: load/store the two even halfwords (bits 48 and 16 of the 64-bit
 * SPE value); whou/whos: the two odd halfwords, zero- or sign-extended
 * to 32 bits; wwo: the odd word only.
 */
955 static always_inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
957 uint64_t ret;
958 ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 48;
959 ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2) << 16;
960 return ret;
962 PPC_SPE_LD_OP(whe, spe_lwhe);
963 static always_inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA,
964 uint64_t data)
966 glue(st16, MEMSUFFIX)(EA, data >> 48);
967 glue(st16, MEMSUFFIX)(EA + 2, data >> 16);
969 PPC_SPE_ST_OP(whe, spe_stwhe);
970 static always_inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
972 uint64_t ret;
973 ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 48;
974 ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2) << 16;
975 return ret;
977 PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
978 static always_inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
979 uint64_t data)
981 glue(st16r, MEMSUFFIX)(EA, data >> 48);
982 glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
984 PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
/* whou: halfwords zero-extended into each 32-bit element. */
985 static always_inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
987 uint64_t ret;
988 ret = (uint64_t)glue(ldu16, MEMSUFFIX)(EA) << 32;
989 ret |= (uint64_t)glue(ldu16, MEMSUFFIX)(EA + 2);
990 return ret;
992 PPC_SPE_LD_OP(whou, spe_lwhou);
/* whos: halfwords sign-extended into each 32-bit element. */
993 static always_inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
995 uint64_t ret;
996 ret = ((uint64_t)((int32_t)glue(lds16, MEMSUFFIX)(EA))) << 32;
997 ret |= (uint64_t)((int32_t)glue(lds16, MEMSUFFIX)(EA + 2));
998 return ret;
1000 PPC_SPE_LD_OP(whos, spe_lwhos);
1001 static always_inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA,
1002 uint64_t data)
1004 glue(st16, MEMSUFFIX)(EA, data >> 32);
1005 glue(st16, MEMSUFFIX)(EA + 2, data);
1007 PPC_SPE_ST_OP(who, spe_stwho);
1008 static always_inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
1010 uint64_t ret;
1011 ret = (uint64_t)glue(ldu16r, MEMSUFFIX)(EA) << 32;
1012 ret |= (uint64_t)glue(ldu16r, MEMSUFFIX)(EA + 2);
1013 return ret;
1015 PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
1016 static always_inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
1018 uint64_t ret;
1019 ret = ((uint64_t)((int32_t)glue(lds16r, MEMSUFFIX)(EA))) << 32;
1020 ret |= (uint64_t)((int32_t)glue(lds16r, MEMSUFFIX)(EA + 2));
1021 return ret;
1023 PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
1024 static always_inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
1025 uint64_t data)
1027 glue(st16r, MEMSUFFIX)(EA, data >> 32);
1028 glue(st16r, MEMSUFFIX)(EA + 2, data);
1030 PPC_SPE_ST_OP(who_le, spe_stwho_le);
/* wwo: store the odd (low) word only. */
1031 static always_inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA,
1032 uint64_t data)
1034 glue(st32, MEMSUFFIX)(EA, data);
1036 PPC_SPE_ST_OP(wwo, spe_stwwo);
1037 static always_inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
1038 uint64_t data)
1040 glue(st32r, MEMSUFFIX)(EA, data);
1042 PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
/* Splat loads: replicate a loaded halfword or word across the SPE
 * vector elements (h → positions 48/16, wwsplat → both words,
 * whsplat → each halfword duplicated within its word).
 */
1043 static always_inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
1045 uint16_t tmp;
1046 tmp = glue(ldu16, MEMSUFFIX)(EA);
1047 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1049 PPC_SPE_LD_OP(h, spe_lh);
1050 static always_inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
1052 uint16_t tmp;
1053 tmp = glue(ldu16r, MEMSUFFIX)(EA);
1054 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1056 PPC_SPE_LD_OP(h_le, spe_lh_le);
1057 static always_inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
1059 uint32_t tmp;
1060 tmp = glue(ldu32, MEMSUFFIX)(EA);
1061 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1063 PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
1064 static always_inline
1065 uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
1067 uint32_t tmp;
1068 tmp = glue(ldu32r, MEMSUFFIX)(EA);
1069 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1071 PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
1072 static always_inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
1074 uint64_t ret;
1075 uint16_t tmp;
1076 tmp = glue(ldu16, MEMSUFFIX)(EA);
1077 ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1078 tmp = glue(ldu16, MEMSUFFIX)(EA + 2);
1079 ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1080 return ret;
1082 PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
1083 static always_inline
1084 uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
1086 uint64_t ret;
1087 uint16_t tmp;
1088 tmp = glue(ldu16r, MEMSUFFIX)(EA);
1089 ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1090 tmp = glue(ldu16r, MEMSUFFIX)(EA + 2);
1091 ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1092 return ret;
1094 PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
1096 #undef MEMSUFFIX