/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
}
static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
{
    int16_t tmp = glue(lduw, MEMSUFFIX)(EA);
    /* Swap the bytes first, then sign-extend the whole 16-bit result */
    return (int16_t)(((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8));
}
static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
        ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
}
#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
        ((tmp & 0x00FF000000000000ULL) >> 40) |
        ((tmp & 0x0000FF0000000000ULL) >> 24) |
        ((tmp & 0x000000FF00000000ULL) >> 8) |
        ((tmp & 0x00000000FF000000ULL) << 8) |
        ((tmp & 0x0000000000FF0000ULL) << 24) |
        ((tmp & 0x000000000000FF00ULL) << 40) |
        ((tmp & 0x00000000000000FFULL) << 56);
}
#endif
#if defined(TARGET_PPC64)
static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
{
    return (int32_t)glue(ldl, MEMSUFFIX)(EA);
}

static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
    /* Swap the bytes first, then sign-extend the whole 32-bit result */
    return (int32_t)(((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
                     ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24));
}
#endif
static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
{
    uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
    glue(stw, MEMSUFFIX)(EA, tmp);
}

static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
{
    uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
        ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
    glue(stl, MEMSUFFIX)(EA, tmp);
}

#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
        ((data & 0x00FF000000000000ULL) >> 40) |
        ((data & 0x0000FF0000000000ULL) >> 24) |
        ((data & 0x000000FF00000000ULL) >> 8) |
        ((data & 0x00000000FF000000ULL) << 8) |
        ((data & 0x0000000000FF0000ULL) << 24) |
        ((data & 0x000000000000FF00ULL) << 40) |
        ((data & 0x00000000000000FFULL) << 56);
    glue(stq, MEMSUFFIX)(EA, tmp);
}
#endif
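
/* The ld*r/st*r helpers above all use the same mask-and-shift byte
 * reversal idiom. A minimal standalone sketch of the 32-bit pattern,
 * kept compiled-out so it cannot clash with the glue()d helpers
 * (example_bswap32 is a hypothetical name, not used in this file):
 */
#if 0
static uint32_t example_bswap32 (uint32_t x)
{
    /* Isolate each byte with a mask, then move it to the mirrored
       position: byte 3 <-> byte 0, byte 2 <-> byte 1 */
    return ((x & 0xFF000000) >> 24) | ((x & 0x00FF0000) >> 8) |
           ((x & 0x0000FF00) << 8) | ((x & 0x000000FF) << 24);
}
/* e.g. example_bswap32(0x11223344) == 0x44332211 */
#endif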
/*** Integer load ***/
#define PPC_LD_OP(name, op)                                                   \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint32_t)T0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LD_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((uint64_t)T0);                                   \
    RETURN();                                                                 \
}
#endif

#define PPC_ST_OP(name, op)                                                   \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1);                                    \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_ST_OP_64(name, op)                                                \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1);                                    \
    RETURN();                                                                 \
}
#endif
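
/* For reference, assuming this header is included with MEMSUFFIX defined
 * to _raw (one of its conventional instantiations), PPC_LD_OP(bz, ldub)
 * below would expand to roughly the following; sketch only, compiled out:
 */
#if 0
void OPPROTO op_lbz_raw (void)
{
    T1 = ldub_raw((uint32_t)T0);
    RETURN();
}
#endif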
PPC_LD_OP(bz, ldub);
PPC_LD_OP(ha, ldsw);
PPC_LD_OP(hz, lduw);
PPC_LD_OP(wz, ldl);
#if defined(TARGET_PPC64)
PPC_LD_OP(d, ldq);
PPC_LD_OP(wa, ldsl);
PPC_LD_OP_64(d, ldq);
PPC_LD_OP_64(wa, ldsl);
PPC_LD_OP_64(bz, ldub);
PPC_LD_OP_64(ha, ldsw);
PPC_LD_OP_64(hz, lduw);
PPC_LD_OP_64(wz, ldl);
#endif

PPC_LD_OP(ha_le, ld16rs);
PPC_LD_OP(hz_le, ld16r);
PPC_LD_OP(wz_le, ld32r);
#if defined(TARGET_PPC64)
PPC_LD_OP(d_le, ld64r);
PPC_LD_OP(wa_le, ld32rs);
PPC_LD_OP_64(d_le, ld64r);
PPC_LD_OP_64(wa_le, ld32rs);
PPC_LD_OP_64(ha_le, ld16rs);
PPC_LD_OP_64(hz_le, ld16r);
PPC_LD_OP_64(wz_le, ld32r);
#endif
/*** Integer store ***/
PPC_ST_OP(b, stb);
PPC_ST_OP(h, stw);
PPC_ST_OP(w, stl);
#if defined(TARGET_PPC64)
PPC_ST_OP(d, stq);
PPC_ST_OP_64(d, stq);
PPC_ST_OP_64(b, stb);
PPC_ST_OP_64(h, stw);
PPC_ST_OP_64(w, stl);
#endif

PPC_ST_OP(h_le, st16r);
PPC_ST_OP(w_le, st32r);
#if defined(TARGET_PPC64)
PPC_ST_OP(d_le, st64r);
PPC_ST_OP_64(d_le, st64r);
PPC_ST_OP_64(h_le, st16r);
PPC_ST_OP_64(w_le, st32r);
#endif
/*** Integer load and store with byte reverse ***/
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr, ld16r);
PPC_LD_OP_64(wbr, ld32r);
PPC_ST_OP_64(hbr, st16r);
PPC_ST_OP_64(wbr, st32r);
#endif

PPC_LD_OP(hbr_le, lduw);
PPC_LD_OP(wbr_le, ldl);
PPC_ST_OP(hbr_le, stw);
PPC_ST_OP(wbr_le, stl);
#if defined(TARGET_PPC64)
PPC_LD_OP_64(hbr_le, lduw);
PPC_LD_OP_64(wbr_le, ldl);
PPC_ST_OP_64(hbr_le, stw);
PPC_ST_OP_64(wbr_le, stl);
#endif
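
/* Note on the _le byte-reverse ops above: a byte-reversed access in
 * little-endian mode reverses twice, and two reversals cancel, so the
 * _le variants map onto the native lduw/ldl/stw/stl accessors instead
 * of ld16r/ld32r. A compiled-out sketch of the identity (example_bswap16
 * is a hypothetical name):
 */
#if 0
static uint16_t example_bswap16 (uint16_t x)
{
    return ((x & 0xFF00) >> 8) | ((x & 0x00FF) << 8);
}
/* For any v, example_bswap16(example_bswap16(v)) == v, which is why
   op_lhbr_le can use lduw directly rather than ld16r. */
#endif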
/*** Integer load and store multiple ***/
void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
{
    glue(do_lmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
{
    glue(do_lmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
{
    glue(do_lmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
{
    glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
{
    glue(do_stmw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
{
    glue(do_stmw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
{
    glue(do_stmw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
{
    glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
/*** Integer load and store strings ***/
void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
{
    glue(do_lsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
{
    glue(do_lsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
{
    glue(do_lsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
{
    glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
/* The PPC32 specification says we must generate an exception if rA is in
 * the range of registers to be loaded. On the other hand, IBM says this
 * is valid, but rA won't be loaded. For now, I'll follow the spec...
 */
void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
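
/* The check above rejects lswx when the destination window starting at
 * PARAM1 would cover rA (PARAM2) or rB (PARAM3). A compiled-out
 * restatement of the predicate with a hypothetical helper name:
 */
#if 0
static int example_lswx_overlaps (uint32_t rd, uint32_t n, uint32_t ra)
{
    /* Overlap when ra lies strictly beyond rd but before rd + n,
       exactly as the inlined condition computes it with T1 */
    return rd < ra && (rd + n) > ra;
}
/* e.g. example_lswx_overlaps(3, 4, 5) is nonzero: r5 falls inside the
   window starting at r3. */
#endif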
#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif

void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc, so no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            do_raise_exception_err(POWERPC_EXCP_PROGRAM,
                                   POWERPC_EXCP_INVAL |
                                   POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif
void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
{
    glue(do_stsw, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
{
    glue(do_stsw_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif

void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
{
    glue(do_stsw_le, MEMSUFFIX)(PARAM1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
{
    glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
    RETURN();
}
#endif
/*** Floating-point store ***/
#define PPC_STF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void)                        \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, FT0);                                   \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_STF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void)             \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, FT0);                                   \
    RETURN();                                                                 \
}
#endif

static inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
{
    glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
}

static inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    glue(stl, MEMSUFFIX)(EA, u.u);
}

PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfs);
PPC_STF_OP(fiwx, stfiwx);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd, stfq);
PPC_STF_OP_64(fs, stfs);
PPC_STF_OP_64(fiwx, stfiwx);
#endif
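
/* stfiwx above relies on union type punning: the double's raw bit
 * pattern is reinterpreted as an integer so its low-order word can be
 * stored without conversion. A compiled-out standalone sketch of that
 * step (example_low32_of_double is a hypothetical name; uint32_t and
 * uint64_t are the same types used throughout this file):
 */
#if 0
static uint32_t example_low32_of_double (double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;              /* reinterpret the bits, no value conversion */
    return (uint32_t)u.u; /* keep only the low-order 32 bits */
}
#endif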
static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = d;
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x00000000000000FFULL) << 56);
    glue(stfq, MEMSUFFIX)(EA, u.d);
}

static inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = float64_to_float32(d, &env->fp_status);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000UL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFUL) << 24);
    glue(stfl, MEMSUFFIX)(EA, u.f);
}

static inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
{
    union {
        double d;
        uint64_t u;
    } u;

    /* Store the low order 32 bits without any conversion */
    u.d = d;
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000UL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFUL) << 24);
    glue(stl, MEMSUFFIX)(EA, u.u);
}

PPC_STF_OP(fd_le, stfqr);
PPC_STF_OP(fs_le, stfsr);
PPC_STF_OP(fiwx_le, stfiwxr);
#if defined(TARGET_PPC64)
PPC_STF_OP_64(fd_le, stfqr);
PPC_STF_OP_64(fs_le, stfsr);
PPC_STF_OP_64(fiwx_le, stfiwxr);
#endif
/*** Floating-point load ***/
#define PPC_LDF_OP(name, op)                                                  \
void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void)                         \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint32_t)T0);                                  \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define PPC_LDF_OP_64(name, op)                                               \
void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void)              \
{                                                                             \
    FT0 = glue(op, MEMSUFFIX)((uint64_t)T0);                                  \
    RETURN();                                                                 \
}
#endif

static inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
{
    return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
}

PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfs);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd, ldfq);
PPC_LDF_OP_64(fs, ldfs);
#endif

static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
{
    union {
        double d;
        uint64_t u;
    } u;

    u.d = glue(ldfq, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
        ((u.u & 0x00FF000000000000ULL) >> 40) |
        ((u.u & 0x0000FF0000000000ULL) >> 24) |
        ((u.u & 0x000000FF00000000ULL) >> 8) |
        ((u.u & 0x00000000FF000000ULL) << 8) |
        ((u.u & 0x0000000000FF0000ULL) << 24) |
        ((u.u & 0x000000000000FF00ULL) << 40) |
        ((u.u & 0x00000000000000FFULL) << 56);

    return u.d;
}

static inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
{
    union {
        float f;
        uint32_t u;
    } u;

    u.f = glue(ldfl, MEMSUFFIX)(EA);
    u.u = ((u.u & 0xFF000000UL) >> 24) |
        ((u.u & 0x00FF0000UL) >> 8) |
        ((u.u & 0x0000FF00UL) << 8) |
        ((u.u & 0x000000FFUL) << 24);

    return float32_to_float64(u.f, &env->fp_status);
}

PPC_LDF_OP(fd_le, ldfqr);
PPC_LDF_OP(fs_le, ldfsr);
#if defined(TARGET_PPC64)
PPC_LDF_OP_64(fd_le, ldfqr);
PPC_LDF_OP_64(fs_le, ldfsr);
#endif
/* Load and set reservation */
void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
{
    /* ldarx requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
{
    /* ldarx requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
{
    /* ldarx requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint32_t)T0);
        env->reserve = (uint32_t)T0;
    }
    RETURN();
}

void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
{
    /* ldarx requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
        env->reserve = (uint64_t)T0;
    }
    RETURN();
}
#endif
/* Store with reservation */
void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
        env->reserve = -1;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
        env->reserve = -1;
    }
    RETURN();
}
void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
{
    /* stdcx. requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
        env->reserve = -1;
    }
    RETURN();
}

void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
{
    /* stdcx. requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
        env->reserve = -1;
    }
    RETURN();
}
#endif
void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
        env->reserve = -1;
    }
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
{
    if (unlikely(T0 & 0x03)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
        env->reserve = -1;
    }
    RETURN();
}
void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
{
    /* stdcx. requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint32_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
        env->reserve = -1;
    }
    RETURN();
}

void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
{
    /* stdcx. requires doubleword alignment */
    if (unlikely(T0 & 0x07)) {
        do_raise_exception(POWERPC_EXCP_ALIGN);
    } else {
        if (unlikely(env->reserve != (uint64_t)T0)) {
            env->crf[0] = xer_so;
        } else {
            glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
            env->crf[0] = xer_so | 0x02;
        }
        env->reserve = -1;
    }
    RETURN();
}
#endif
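
/* Summary of the reservation protocol implemented above: lwarx/ldarx
 * record the address in env->reserve; st{w,d}cx. succeed (setting the
 * 0x02 / CR0.EQ bit on top of xer_so) only while that reservation still
 * matches, and always consume it. A compiled-out pseudo-C model of the
 * stwcx. success path (example_stwcx_model is a hypothetical name):
 */
#if 0
static int example_stwcx_model (target_ulong addr, uint32_t val)
{
    int ok = (env->reserve == addr);

    if (ok) {
        /* the actual word store (stl) happens here in the real op */
    }
    env->reserve = -1;  /* any conditional store consumes the reservation */
    return ok;          /* nonzero -> CR0.EQ (the 0x02 bit) would be set */
}
#endif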
void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
#if defined(TARGET_PPC64)
void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
#if DCACHE_LINE_SIZE == 64
    /* XXX: cache line size should be 64 for POWER & PowerPC 601 */
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
#endif
    RETURN();
}
#endif
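
/* The dcbz ops above zero a cache line with fully unrolled word stores
 * (32 bytes by default, 64 under DCACHE_LINE_SIZE == 64). An equivalent
 * loop form over plain host memory, compiled out and with a hypothetical
 * name, just to make the access pattern explicit (the real ops go through
 * the glue()d softmmu accessors instead):
 */
#if 0
static void example_dcbz_line (uint32_t *line, int line_words)
{
    int i;

    for (i = 0; i < line_words; i++) {
        line[i] = 0;  /* one zero word per iteration, matching the
                         unrolled stl calls above */
    }
}
#endif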
/* Instruction cache block invalidate */
void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
{
    glue(do_icbi, MEMSUFFIX)();
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
{
    glue(do_icbi_64, MEMSUFFIX)();
    RETURN();
}
#endif
/* External access */
void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
{
    T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif

void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
{
    T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
    RETURN();
}
#endif

void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
    RETURN();
}

#if defined(TARGET_PPC64)
void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
{
    glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
    RETURN();
}
#endif
/* XXX: these micro-ops need tests! */
/* PowerPC 601 specific instructions (POWER bridge) */
void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
{
    /* When the byte count is 0, do nothing */
    if (likely(T1 != 0)) {
        glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
    }
    RETURN();
}
/* POWER2 quad load and store */
/* XXX: TAGs are not managed */
void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_lfq_le, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq, MEMSUFFIX)();
    RETURN();
}

void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
{
    glue(do_POWER2_stfq_le, MEMSUFFIX)();
    RETURN();
}
#if defined(TARGET_PPCEMB)
/* SPE extension */
#define _PPC_SPE_LD_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void)                     \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0);                                \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_LD_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void)          \
{                                                                             \
    T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0);                                \
    RETURN();                                                                 \
}
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op);                                                     \
_PPC_SPE_LD_OP_64(name, op)
#else
#define PPC_SPE_LD_OP(name, op)                                               \
_PPC_SPE_LD_OP(name, op)
#endif

#define _PPC_SPE_ST_OP(name, op)                                              \
void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void)                    \
{                                                                             \
    glue(op, MEMSUFFIX)((uint32_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}

#if defined(TARGET_PPC64)
#define _PPC_SPE_ST_OP_64(name, op)                                           \
void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void)         \
{                                                                             \
    glue(op, MEMSUFFIX)((uint64_t)T0, T1_64);                                 \
    RETURN();                                                                 \
}
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op);                                                     \
_PPC_SPE_ST_OP_64(name, op)
#else
#define PPC_SPE_ST_OP(name, op)                                               \
_PPC_SPE_ST_OP(name, op)
#endif
#if !defined(TARGET_PPC64)
PPC_SPE_LD_OP(dd, ldq);
PPC_SPE_ST_OP(dd, stq);
PPC_SPE_LD_OP(dd_le, ld64r);
PPC_SPE_ST_OP(dd_le, st64r);
#endif
static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw, spe_ldw);
static inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data >> 32);
    glue(stl, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw, spe_stdw);
static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);
    return ret;
}
PPC_SPE_LD_OP(dw_le, spe_ldw_le);
static inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
                                                 uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data >> 32);
    glue(st32r, MEMSUFFIX)(EA + 4, data);
}
PPC_SPE_ST_OP(dw_le, spe_stdw_le);
static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh, spe_ldh);
static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 4, data >> 16);
    glue(stw, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh, spe_stdh);
static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 4) << 16;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 6);
    return ret;
}
PPC_SPE_LD_OP(dh_le, spe_ldh_le);
static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
                                                 uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
    glue(st16r, MEMSUFFIX)(EA + 6, data);
}
PPC_SPE_ST_OP(dh_le, spe_stdh_le);
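/* The SPE dh helpers above pack four big-endian halfwords into the
 * 64-bit T1_64 element, the halfword at EA landing in the top 16 bits.
 * A compiled-out standalone sketch of the packing step (example_pack4h
 * is a hypothetical name):
 */
#if 0
static uint64_t example_pack4h (uint16_t h0, uint16_t h1,
                                uint16_t h2, uint16_t h3)
{
    /* h0 comes from EA, h3 from EA + 6 */
    return ((uint64_t)h0 << 48) | ((uint64_t)h1 << 32) |
           ((uint64_t)h2 << 16) | (uint64_t)h3;
}
#endif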
static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe, spe_lwhe);
static inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 48);
    glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe, spe_stwhe);
static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 16;
    return ret;
}
PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
static inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 48);
    glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
}
PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou, spe_lwhou);
static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
    ret |= (uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos, spe_lwhos);
static inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stw, MEMSUFFIX)(EA, data >> 32);
    glue(stw, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who, spe_stwho);
static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
    ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2);
    return ret;
}
PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
    ret |= (uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA + 2));
    return ret;
}
PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
static inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st16r, MEMSUFFIX)(EA, data >> 32);
    glue(st16r, MEMSUFFIX)(EA + 2, data);
}
PPC_SPE_ST_OP(who_le, spe_stwho_le);
#if !defined(TARGET_PPC64)
static inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, uint64_t data)
{
    glue(stl, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo, spe_stwwo);
static inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
                                                  uint64_t data)
{
    glue(st32r, MEMSUFFIX)(EA, data);
}
PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
#endif
static inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(lduw, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h, spe_lh);
static inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
{
    uint16_t tmp;
    tmp = glue(ld16r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
}
PPC_SPE_LD_OP(h_le, spe_lh_le);
static inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ldl, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
static inline uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint32_t tmp;
    tmp = glue(ld32r, MEMSUFFIX)(EA);
    return ((uint64_t)tmp << 32) | (uint64_t)tmp;
}
PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(lduw, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(lduw, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
static inline uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
{
    uint64_t ret;
    uint16_t tmp;
    tmp = glue(ld16r, MEMSUFFIX)(EA);
    ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
    tmp = glue(ld16r, MEMSUFFIX)(EA + 2);
    ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
    return ret;
}
PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
#endif /* defined(TARGET_PPCEMB) */
#undef MEMSUFFIX