/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
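
/* Note: this file is textually expanded several times (e.g. once per
 * memory access mode, with MEMSUFFIX set to suffixes such as _raw, _user
 * or _kernel), so every helper and micro-op below is glued to the current
 * suffix via glue()/MEMSUFFIX. */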
static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
}

static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    /* Byte-swap first, then sign-extend the swapped 16-bit value:
     * the cast must cover the whole expression, not just its first term. */
    return (int16_t)(((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8));
}

static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
}
#if defined(TARGET_PPC64)
static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}

static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}

static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    /* Byte-swap first, then sign-extend the swapped 32-bit value */
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
                     ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
#endif
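
/* Example: ld32r applied to a word stored as 0x11223344 yields 0x44332211.
 * These byte-reversed helpers back both the lhbrx/lwbrx-style ops and the
 * little-endian (_le) access modes defined below. */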
static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
{
    uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
    glue(stw, MEMSUFFIX)(EA, tmp);
}

static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
{
    uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
        ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
    glue(stl, MEMSUFFIX)(EA, tmp);
}

#if defined(TARGET_PPC64)
static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
        ((data & 0x00FF000000000000ULL) >> 40) |
        ((data & 0x0000FF0000000000ULL) >> 24) |
        ((data & 0x000000FF00000000ULL) >> 8) |
        ((data & 0x00000000FF000000ULL) << 8) |
        ((data & 0x0000000000FF0000ULL) << 24) |
        ((data & 0x000000000000FF00ULL) << 40) |
        ((data & 0x00000000000000FFULL) << 56);
    glue(stq, MEMSUFFIX)(EA, tmp);
}
#endif
/*** Integer load ***/
#define PPC_LD_OP(name, op)                                        \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)              \
{                                                                  \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                        \
    RETURN();                                                      \
}

#if defined(TARGET_PPC64)
#define PPC_LD_OP_64(name, op)                                     \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)   \
{                                                                  \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                        \
    RETURN();                                                      \
}
#endif

#define PPC_ST_OP(name, op)                                        \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)             \
{                                                                  \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                         \
    RETURN();                                                      \
}

#if defined(TARGET_PPC64)
#define PPC_ST_OP_64(name, op)                                     \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)  \
{                                                                  \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                         \
    RETURN();                                                      \
}
#endif
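
/* For illustration, with MEMSUFFIX defined as _raw, PPC_LD_OP(bz, ldub)
 * expands to:
 *
 *     void OPPROTO op_lbz_raw (void)
 *     {
 *         T1 = ldub_raw((uint32_t)T0);
 *         RETURN();
 *     }
 *
 * i.e. one micro-op per (operation, access mode) pair. */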
PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif

/*** Integer store ***/
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif

/*** Integer load and store with byte reverse ***/
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif
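
/* In little-endian mode the explicit byte reverse of lhbrx/lwbrx cancels
 * out, which is why the hbr_le/wbr_le ops above map straight to the
 * native lduw/ldl/stw/stl accessors. */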
/*** Integer load and store multiple ***/
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

/*** Integer load and store strings ***/
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
{
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
/* PPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * On the other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
 */
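/* Example: with rD = 16 (PARAM1), rA = 20 (PARAM2) and XER[bc] = 8 (T1),
 * the test below sees 16 < 20 and 16 + 8 > 20 and raises the invalid-form
 * program exception. Note that it compares the byte count, not the number
 * of registers actually written, against the register indexes. */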
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif
void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
{
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
/*** Floating-point store ***/
#define PPC_STF_OP(name, op)                                       \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)             \
{                                                                  \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                        \
    RETURN();                                                      \
}

#if defined(TARGET_PPC64)
#define PPC_STF_OP_64(name, op)                                    \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)  \
{                                                                  \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                        \
    RETURN();                                                      \
}
#endif

PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfl);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfl);
#endif
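
/* The unions in stfqr/stflr (and ldfqr/ldflr below) type-pun the FP value
 * to a same-sized integer so its byte order can be reversed around the
 * memory access; this assumes the host uses 32-bit floats and 64-bit
 * doubles. */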
static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
          ((u.u & 0x00FF000000000000ULL) >> 40) |
          ((u.u & 0x0000FF0000000000ULL) >> 24) |
          ((u.u & 0x000000FF00000000ULL) >> 8) |
          ((u.u & 0x00000000FF000000ULL) << 8) |
          ((u.u & 0x0000000000FF0000ULL) << 24) |
          ((u.u & 0x000000000000FF00ULL) << 40) |
          ((u.u & 0x00000000000000FFULL) << 56);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}

static inline void glue(stflr, MEMSUFFIX) (target_ulong EA, float f)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = f;
    u.u = ((u.u & 0xFF000000UL) >> 24) |
          ((u.u & 0x00FF0000UL) >> 8) |
          ((u.u & 0x0000FF00UL) << 8) |
          ((u.u & 0x000000FFUL) << 24);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}

PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stflr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stflr);
#endif
/*** Floating-point load ***/
#define PPC_LDF_OP(name, op)                                       \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)              \
{                                                                  \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                       \
    RETURN();                                                      \
}

#if defined(TARGET_PPC64)
#define PPC_LDF_OP_64(name, op)                                    \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)   \
{                                                                  \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                       \
    RETURN();                                                      \
}
#endif

PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfl);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfl);
#endif
static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
          ((u.u & 0x00FF000000000000ULL) >> 40) |
          ((u.u & 0x0000FF0000000000ULL) >> 24) |
          ((u.u & 0x000000FF00000000ULL) >> 8) |
          ((u.u & 0x00000000FF000000ULL) << 8) |
          ((u.u & 0x0000000000FF0000ULL) << 24) |
          ((u.u & 0x000000000000FF00ULL) << 40) |
          ((u.u & 0x00000000000000FFULL) << 56);

    return u.d;
}

static inline float glue(ldflr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
          ((u.u & 0x00FF0000UL) >> 8) |
          ((u.u & 0x0000FF00UL) << 8) |
          ((u.u & 0x000000FFUL) << 24);

    return u.f;
}

PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldflr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldflr);
#endif
/* Load and set reservation */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
        regs->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}
void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    /* ldarx requires doubleword alignment, hence the 0x07 mask */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
        regs->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}
void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    /* ldarx requires doubleword alignment, hence the 0x07 mask */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
        regs->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
/* Store with reservation */
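/* A conditional store succeeds only while the reservation set by
 * l{w,d}arx still matches the effective address: on success CR0 is set
 * to xer_ov plus the EQ bit (0x02), on failure to xer_ov alone, and the
 * reservation is cleared in both cases. */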
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    /* stdcx requires doubleword alignment, hence the 0x07 mask */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
#endif
void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    /* stdcx requires doubleword alignment, hence the 0x07 mask */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(EXCP_ALIGN);
    } else {
        if (unlikely(regs->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_ov;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_ov | 0x02;
        }
    }
    regs->reserve = -1;
    RETURN();
}
#endif
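
/* dcbz zeroes a full data cache block, emulated below as a run of 32-bit
 * zero stores covering the line: 32 bytes by default, 64 bytes when
 * DCACHE_LINE_SIZE says so (e.g. for POWER and the PowerPC 601). */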
void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
#if defined(TARGET_PPC64)
void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
#endif
/* Instruction cache block invalidate */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif
/* External access */
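/* eciwx/ecowx (external control word in/out) are modelled here as plain
 * word-sized loads and stores; any EAR-directed device selection they
 * imply is not handled at this level. */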
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif
/* XXX: those micro-ops need tests! */
/* PowerPC 601 specific instructions (POWER bridge) */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}

/* POWER2 quad load and store */
/* XXX: TAGs are not managed */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}

#undef MEMSUFFIX