/*
 * Scrape provenance (gitweb header, not part of the original file):
 * commit: "target-ppc: convert dcbz instruction to TCG"
 * path:   [qemu/qemu-JZ.git] / target-ppc / op_mem.h
 * blob:   6f9eb2054800d973e5481884c8b81c9b152cda55
 */
/*
 *  PowerPC emulation micro-operations for qemu.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

#include "op_mem_access.h"

/*** Integer load and store strings ***/
24 void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
26 glue(do_lsw, MEMSUFFIX)(PARAM1);
27 RETURN();
30 #if defined(TARGET_PPC64)
31 void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
33 glue(do_lsw_64, MEMSUFFIX)(PARAM1);
34 RETURN();
36 #endif
38 /* PPC32 specification says we must generate an exception if
39 * rA is in the range of registers to be loaded.
40 * In an other hand, IBM says this is valid, but rA won't be loaded.
41 * For now, I'll follow the spec...
43 void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
45 /* Note: T1 comes from xer_bc then no cast is needed */
46 if (likely(T1 != 0)) {
47 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
48 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
49 raise_exception_err(env, POWERPC_EXCP_PROGRAM,
50 POWERPC_EXCP_INVAL |
51 POWERPC_EXCP_INVAL_LSWX);
52 } else {
53 glue(do_lsw, MEMSUFFIX)(PARAM1);
56 RETURN();
#if defined(TARGET_PPC64)
/* 64-bit effective-address variant of lswx; same rA/rB overlap check
 * as the 32-bit form (see comment above op_lswx). */
void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
{
    /* Note: T1 comes from xer_bc then no cast is needed */
    if (likely(T1 != 0)) {
        if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
                     (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
            raise_exception_err(env, POWERPC_EXCP_PROGRAM,
                                POWERPC_EXCP_INVAL |
                                POWERPC_EXCP_INVAL_LSWX);
        } else {
            glue(do_lsw_64, MEMSUFFIX)(PARAM1);
        }
    }
    RETURN();
}
#endif
77 void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
79 glue(do_stsw, MEMSUFFIX)(PARAM1);
80 RETURN();
83 #if defined(TARGET_PPC64)
84 void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
86 glue(do_stsw_64, MEMSUFFIX)(PARAM1);
87 RETURN();
89 #endif
91 /* Load and set reservation */
92 void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
94 if (unlikely(T0 & 0x03)) {
95 raise_exception(env, POWERPC_EXCP_ALIGN);
96 } else {
97 T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
98 env->reserve = (uint32_t)T0;
100 RETURN();
103 #if defined(TARGET_PPC64)
104 void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
106 if (unlikely(T0 & 0x03)) {
107 raise_exception(env, POWERPC_EXCP_ALIGN);
108 } else {
109 T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);
110 env->reserve = (uint64_t)T0;
112 RETURN();
115 void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
117 if (unlikely(T0 & 0x03)) {
118 raise_exception(env, POWERPC_EXCP_ALIGN);
119 } else {
120 T1 = glue(ldu64, MEMSUFFIX)((uint32_t)T0);
121 env->reserve = (uint32_t)T0;
123 RETURN();
126 void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
128 if (unlikely(T0 & 0x03)) {
129 raise_exception(env, POWERPC_EXCP_ALIGN);
130 } else {
131 T1 = glue(ldu64, MEMSUFFIX)((uint64_t)T0);
132 env->reserve = (uint64_t)T0;
134 RETURN();
136 #endif
138 void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
140 if (unlikely(T0 & 0x03)) {
141 raise_exception(env, POWERPC_EXCP_ALIGN);
142 } else {
143 T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);
144 env->reserve = (uint32_t)T0;
146 RETURN();
149 #if defined(TARGET_PPC64)
150 void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
152 if (unlikely(T0 & 0x03)) {
153 raise_exception(env, POWERPC_EXCP_ALIGN);
154 } else {
155 T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);
156 env->reserve = (uint64_t)T0;
158 RETURN();
161 void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
163 if (unlikely(T0 & 0x03)) {
164 raise_exception(env, POWERPC_EXCP_ALIGN);
165 } else {
166 T1 = glue(ldu64r, MEMSUFFIX)((uint32_t)T0);
167 env->reserve = (uint32_t)T0;
169 RETURN();
172 void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
174 if (unlikely(T0 & 0x03)) {
175 raise_exception(env, POWERPC_EXCP_ALIGN);
176 } else {
177 T1 = glue(ldu64r, MEMSUFFIX)((uint64_t)T0);
178 env->reserve = (uint64_t)T0;
180 RETURN();
182 #endif
184 /* Store with reservation */
185 void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
187 if (unlikely(T0 & 0x03)) {
188 raise_exception(env, POWERPC_EXCP_ALIGN);
189 } else {
190 if (unlikely(env->reserve != (uint32_t)T0)) {
191 env->crf[0] = xer_so;
192 } else {
193 glue(st32, MEMSUFFIX)((uint32_t)T0, T1);
194 env->crf[0] = xer_so | 0x02;
197 env->reserve = (target_ulong)-1ULL;
198 RETURN();
201 #if defined(TARGET_PPC64)
202 void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
204 if (unlikely(T0 & 0x03)) {
205 raise_exception(env, POWERPC_EXCP_ALIGN);
206 } else {
207 if (unlikely(env->reserve != (uint64_t)T0)) {
208 env->crf[0] = xer_so;
209 } else {
210 glue(st32, MEMSUFFIX)((uint64_t)T0, T1);
211 env->crf[0] = xer_so | 0x02;
214 env->reserve = (target_ulong)-1ULL;
215 RETURN();
218 void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
220 if (unlikely(T0 & 0x03)) {
221 raise_exception(env, POWERPC_EXCP_ALIGN);
222 } else {
223 if (unlikely(env->reserve != (uint32_t)T0)) {
224 env->crf[0] = xer_so;
225 } else {
226 glue(st64, MEMSUFFIX)((uint32_t)T0, T1);
227 env->crf[0] = xer_so | 0x02;
230 env->reserve = (target_ulong)-1ULL;
231 RETURN();
234 void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
236 if (unlikely(T0 & 0x03)) {
237 raise_exception(env, POWERPC_EXCP_ALIGN);
238 } else {
239 if (unlikely(env->reserve != (uint64_t)T0)) {
240 env->crf[0] = xer_so;
241 } else {
242 glue(st64, MEMSUFFIX)((uint64_t)T0, T1);
243 env->crf[0] = xer_so | 0x02;
246 env->reserve = (target_ulong)-1ULL;
247 RETURN();
249 #endif
251 void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
253 if (unlikely(T0 & 0x03)) {
254 raise_exception(env, POWERPC_EXCP_ALIGN);
255 } else {
256 if (unlikely(env->reserve != (uint32_t)T0)) {
257 env->crf[0] = xer_so;
258 } else {
259 glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
260 env->crf[0] = xer_so | 0x02;
263 env->reserve = (target_ulong)-1ULL;
264 RETURN();
267 #if defined(TARGET_PPC64)
268 void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
270 if (unlikely(T0 & 0x03)) {
271 raise_exception(env, POWERPC_EXCP_ALIGN);
272 } else {
273 if (unlikely(env->reserve != (uint64_t)T0)) {
274 env->crf[0] = xer_so;
275 } else {
276 glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
277 env->crf[0] = xer_so | 0x02;
280 env->reserve = (target_ulong)-1ULL;
281 RETURN();
284 void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
286 if (unlikely(T0 & 0x03)) {
287 raise_exception(env, POWERPC_EXCP_ALIGN);
288 } else {
289 if (unlikely(env->reserve != (uint32_t)T0)) {
290 env->crf[0] = xer_so;
291 } else {
292 glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
293 env->crf[0] = xer_so | 0x02;
296 env->reserve = (target_ulong)-1ULL;
297 RETURN();
300 void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
302 if (unlikely(T0 & 0x03)) {
303 raise_exception(env, POWERPC_EXCP_ALIGN);
304 } else {
305 if (unlikely(env->reserve != (uint64_t)T0)) {
306 env->crf[0] = xer_so;
307 } else {
308 glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
309 env->crf[0] = xer_so | 0x02;
312 env->reserve = (target_ulong)-1ULL;
313 RETURN();
315 #endif
317 /* Instruction cache block invalidate */
318 void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
320 glue(do_icbi, MEMSUFFIX)();
321 RETURN();
324 #if defined(TARGET_PPC64)
325 void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
327 glue(do_icbi_64, MEMSUFFIX)();
328 RETURN();
330 #endif
332 /* External access */
333 void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
335 T1 = glue(ldu32, MEMSUFFIX)((uint32_t)T0);
336 RETURN();
339 #if defined(TARGET_PPC64)
340 void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
342 T1 = glue(ldu32, MEMSUFFIX)((uint64_t)T0);
343 RETURN();
345 #endif
347 void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
349 glue(st32, MEMSUFFIX)((uint32_t)T0, T1);
350 RETURN();
353 #if defined(TARGET_PPC64)
354 void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
356 glue(st32, MEMSUFFIX)((uint64_t)T0, T1);
357 RETURN();
359 #endif
361 void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
363 T1 = glue(ldu32r, MEMSUFFIX)((uint32_t)T0);
364 RETURN();
367 #if defined(TARGET_PPC64)
368 void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
370 T1 = glue(ldu32r, MEMSUFFIX)((uint64_t)T0);
371 RETURN();
373 #endif
375 void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
377 glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
378 RETURN();
381 #if defined(TARGET_PPC64)
382 void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
384 glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
385 RETURN();
387 #endif
389 /* XXX: those micro-ops need tests ! */
390 /* PowerPC 601 specific instructions (POWER bridge) */
391 void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
393 /* When byte count is 0, do nothing */
394 if (likely(T1 != 0)) {
395 glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
397 RETURN();
400 #undef MEMSUFFIX