qemu-kvm/fedora.git: target-mips/op_mem.c
/*
 *  MIPS emulation memory micro-operations for qemu.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

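/* Note: this file is meant to be textually included once per memory access
   mode, with MEMSUFFIX set to the corresponding suffix; glue() pastes that
   suffix onto every op and memory accessor name below.  (The exact set of
   suffixes, e.g. _raw/_user/_kernel, is an assumption based on how other
   qemu targets of this era use their op_mem.c; it is not visible here.) */
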
/* Standard loads and stores */
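/* Convention for the standard ops below: T0 holds the effective address on
   entry; loads leave their result in T0, stores take their data from T1. */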
void glue(op_lb, MEMSUFFIX) (void)
{
    T0 = glue(ldsb, MEMSUFFIX)(T0);
    FORCE_RET();
}

void glue(op_lbu, MEMSUFFIX) (void)
{
    T0 = glue(ldub, MEMSUFFIX)(T0);
    FORCE_RET();
}

void glue(op_sb, MEMSUFFIX) (void)
{
    glue(stb, MEMSUFFIX)(T0, T1);
    FORCE_RET();
}

void glue(op_lh, MEMSUFFIX) (void)
{
    T0 = glue(ldsw, MEMSUFFIX)(T0);
    FORCE_RET();
}

void glue(op_lhu, MEMSUFFIX) (void)
{
    T0 = glue(lduw, MEMSUFFIX)(T0);
    FORCE_RET();
}

void glue(op_sh, MEMSUFFIX) (void)
{
    glue(stw, MEMSUFFIX)(T0, T1);
    FORCE_RET();
}

void glue(op_lw, MEMSUFFIX) (void)
{
    T0 = glue(ldl, MEMSUFFIX)(T0);
    FORCE_RET();
}

void glue(op_lwu, MEMSUFFIX) (void)
{
    T0 = (uint32_t)glue(ldl, MEMSUFFIX)(T0);
    FORCE_RET();
}

void glue(op_sw, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)(T0, T1);
    FORCE_RET();
}

/* "Half" loads and stores.  We must do the memory access inline,
   or fault handling won't work. */

#ifdef TARGET_WORDS_BIGENDIAN
#define GET_LMASK(v) ((v) & 3)
#define GET_OFFSET(addr, offset) (addr + (offset))
#else
#define GET_LMASK(v) (((v) & 3) ^ 3)
#define GET_OFFSET(addr, offset) (addr - (offset))
#endif

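/* Illustrative trace (not part of the original source): on a big-endian
   target with T0 & 3 == 1, GET_LMASK(T0) is 1, so op_lwl below reads the
   bytes at T0, T0+1 and T0+2 into bits 31..24, 23..16 and 15..8 of T1 and
   leaves the low byte untouched, i.e. LWL merges the remainder of the
   aligned word, from the addressed byte onwards, into the high end of the
   register.  LWR/SWL/SWR follow the same pattern from the other end. */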
void glue(op_lwl, MEMSUFFIX) (void)
{
    target_ulong tmp;

    tmp = glue(ldub, MEMSUFFIX)(T0);
    T1 = (T1 & 0x00FFFFFF) | (tmp << 24);

    if (GET_LMASK(T0) <= 2) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, 1));
        T1 = (T1 & 0xFF00FFFF) | (tmp << 16);
    }

    if (GET_LMASK(T0) <= 1) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, 2));
        T1 = (T1 & 0xFFFF00FF) | (tmp << 8);
    }

    if (GET_LMASK(T0) == 0) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, 3));
        T1 = (T1 & 0xFFFFFF00) | tmp;
    }
    T1 = (int32_t)T1;
    FORCE_RET();
}

void glue(op_lwr, MEMSUFFIX) (void)
{
    target_ulong tmp;

    tmp = glue(ldub, MEMSUFFIX)(T0);
    T1 = (T1 & 0xFFFFFF00) | tmp;

    if (GET_LMASK(T0) >= 1) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, -1));
        T1 = (T1 & 0xFFFF00FF) | (tmp << 8);
    }

    if (GET_LMASK(T0) >= 2) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, -2));
        T1 = (T1 & 0xFF00FFFF) | (tmp << 16);
    }

    if (GET_LMASK(T0) == 3) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, -3));
        T1 = (T1 & 0x00FFFFFF) | (tmp << 24);
    }
    T1 = (int32_t)T1;
    FORCE_RET();
}

void glue(op_swl, MEMSUFFIX) (void)
{
    glue(stb, MEMSUFFIX)(T0, (uint8_t)(T1 >> 24));

    if (GET_LMASK(T0) <= 2)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, 1), (uint8_t)(T1 >> 16));

    if (GET_LMASK(T0) <= 1)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, 2), (uint8_t)(T1 >> 8));

    if (GET_LMASK(T0) == 0)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, 3), (uint8_t)T1);

    FORCE_RET();
}

void glue(op_swr, MEMSUFFIX) (void)
{
    glue(stb, MEMSUFFIX)(T0, (uint8_t)T1);

    if (GET_LMASK(T0) >= 1)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, -1), (uint8_t)(T1 >> 8));

    if (GET_LMASK(T0) >= 2)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, -2), (uint8_t)(T1 >> 16));

    if (GET_LMASK(T0) == 3)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, -3), (uint8_t)(T1 >> 24));

    FORCE_RET();
}

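/* Load-linked / store-conditional.  op_ll records the address of the last
   LL in CP0_LLAddr; op_sc succeeds (T0 = 1) only when its address matches
   that record, otherwise it stores nothing and returns T0 = 0.  A misaligned
   SC address raises an address-error-on-store exception (EXCP_AdES). */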
void glue(op_ll, MEMSUFFIX) (void)
{
    T1 = T0;
    T0 = glue(ldl, MEMSUFFIX)(T0);
    env->CP0_LLAddr = T1;
    FORCE_RET();
}

void glue(op_sc, MEMSUFFIX) (void)
{
    CALL_FROM_TB0(dump_sc);
    if (T0 & 0x3) {
        env->CP0_BadVAddr = T0;
        CALL_FROM_TB1(do_raise_exception, EXCP_AdES);
    }
    if (T0 == env->CP0_LLAddr) {
        glue(stl, MEMSUFFIX)(T0, T1);
        T0 = 1;
    } else {
        T0 = 0;
    }
    FORCE_RET();
}

#if defined(TARGET_MIPS64)
void glue(op_ld, MEMSUFFIX) (void)
{
    T0 = glue(ldq, MEMSUFFIX)(T0);
    FORCE_RET();
}

void glue(op_sd, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)(T0, T1);
    FORCE_RET();
}

/* "Half" loads and stores.  We must do the memory access inline,
   or fault handling won't work. */

#ifdef TARGET_WORDS_BIGENDIAN
#define GET_LMASK64(v) ((v) & 7)
#else
#define GET_LMASK64(v) (((v) & 7) ^ 7)
#endif

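/* The 64-bit LDL/LDR/SDL/SDR variants below mirror the 32-bit ones above,
   with GET_LMASK64 selecting a byte lane within an 8-byte doubleword and
   GET_OFFSET reused unchanged. */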
void glue(op_ldl, MEMSUFFIX) (void)
{
    uint64_t tmp;

    tmp = glue(ldub, MEMSUFFIX)(T0);
    T1 = (T1 & 0x00FFFFFFFFFFFFFFULL) | (tmp << 56);

    if (GET_LMASK64(T0) <= 6) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, 1));
        T1 = (T1 & 0xFF00FFFFFFFFFFFFULL) | (tmp << 48);
    }

    if (GET_LMASK64(T0) <= 5) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, 2));
        T1 = (T1 & 0xFFFF00FFFFFFFFFFULL) | (tmp << 40);
    }

    if (GET_LMASK64(T0) <= 4) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, 3));
        T1 = (T1 & 0xFFFFFF00FFFFFFFFULL) | (tmp << 32);
    }

    if (GET_LMASK64(T0) <= 3) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, 4));
        T1 = (T1 & 0xFFFFFFFF00FFFFFFULL) | (tmp << 24);
    }

    if (GET_LMASK64(T0) <= 2) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, 5));
        T1 = (T1 & 0xFFFFFFFFFF00FFFFULL) | (tmp << 16);
    }

    if (GET_LMASK64(T0) <= 1) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, 6));
        T1 = (T1 & 0xFFFFFFFFFFFF00FFULL) | (tmp << 8);
    }

    if (GET_LMASK64(T0) == 0) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, 7));
        T1 = (T1 & 0xFFFFFFFFFFFFFF00ULL) | tmp;
    }

    FORCE_RET();
}

void glue(op_ldr, MEMSUFFIX) (void)
{
    uint64_t tmp;

    tmp = glue(ldub, MEMSUFFIX)(T0);
    T1 = (T1 & 0xFFFFFFFFFFFFFF00ULL) | tmp;

    if (GET_LMASK64(T0) >= 1) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, -1));
        T1 = (T1 & 0xFFFFFFFFFFFF00FFULL) | (tmp << 8);
    }

    if (GET_LMASK64(T0) >= 2) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, -2));
        T1 = (T1 & 0xFFFFFFFFFF00FFFFULL) | (tmp << 16);
    }

    if (GET_LMASK64(T0) >= 3) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, -3));
        T1 = (T1 & 0xFFFFFFFF00FFFFFFULL) | (tmp << 24);
    }

    if (GET_LMASK64(T0) >= 4) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, -4));
        T1 = (T1 & 0xFFFFFF00FFFFFFFFULL) | (tmp << 32);
    }

    if (GET_LMASK64(T0) >= 5) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, -5));
        T1 = (T1 & 0xFFFF00FFFFFFFFFFULL) | (tmp << 40);
    }

    if (GET_LMASK64(T0) >= 6) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, -6));
        T1 = (T1 & 0xFF00FFFFFFFFFFFFULL) | (tmp << 48);
    }

    if (GET_LMASK64(T0) == 7) {
        tmp = glue(ldub, MEMSUFFIX)(GET_OFFSET(T0, -7));
        T1 = (T1 & 0x00FFFFFFFFFFFFFFULL) | (tmp << 56);
    }

    FORCE_RET();
}

void glue(op_sdl, MEMSUFFIX) (void)
{
    glue(stb, MEMSUFFIX)(T0, (uint8_t)(T1 >> 56));

    if (GET_LMASK64(T0) <= 6)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, 1), (uint8_t)(T1 >> 48));

    if (GET_LMASK64(T0) <= 5)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, 2), (uint8_t)(T1 >> 40));

    if (GET_LMASK64(T0) <= 4)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, 3), (uint8_t)(T1 >> 32));

    if (GET_LMASK64(T0) <= 3)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, 4), (uint8_t)(T1 >> 24));

    if (GET_LMASK64(T0) <= 2)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, 5), (uint8_t)(T1 >> 16));

    if (GET_LMASK64(T0) <= 1)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, 6), (uint8_t)(T1 >> 8));

    if (GET_LMASK64(T0) <= 0)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, 7), (uint8_t)T1);

    FORCE_RET();
}

void glue(op_sdr, MEMSUFFIX) (void)
{
    glue(stb, MEMSUFFIX)(T0, (uint8_t)T1);

    if (GET_LMASK64(T0) >= 1)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, -1), (uint8_t)(T1 >> 8));

    if (GET_LMASK64(T0) >= 2)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, -2), (uint8_t)(T1 >> 16));

    if (GET_LMASK64(T0) >= 3)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, -3), (uint8_t)(T1 >> 24));

    if (GET_LMASK64(T0) >= 4)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, -4), (uint8_t)(T1 >> 32));

    if (GET_LMASK64(T0) >= 5)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, -5), (uint8_t)(T1 >> 40));

    if (GET_LMASK64(T0) >= 6)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, -6), (uint8_t)(T1 >> 48));

    if (GET_LMASK64(T0) == 7)
        glue(stb, MEMSUFFIX)(GET_OFFSET(T0, -7), (uint8_t)(T1 >> 56));

    FORCE_RET();
}

void glue(op_lld, MEMSUFFIX) (void)
{
    T1 = T0;
    T0 = glue(ldq, MEMSUFFIX)(T0);
    env->CP0_LLAddr = T1;
    FORCE_RET();
}

void glue(op_scd, MEMSUFFIX) (void)
{
    CALL_FROM_TB0(dump_sc);
    if (T0 & 0x7) {
        env->CP0_BadVAddr = T0;
        CALL_FROM_TB1(do_raise_exception, EXCP_AdES);
    }
    if (T0 == env->CP0_LLAddr) {
        glue(stq, MEMSUFFIX)(T0, T1);
        T0 = 1;
    } else {
        T0 = 0;
    }
    FORCE_RET();
}
#endif /* TARGET_MIPS64 */

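/* Coprocessor 1 (FPU) loads and stores: WT0 is the 32-bit and DT0 the 64-bit
   FPU data temporary.  op_luxc1/op_suxc1 implement the unaligned indexed
   variants by clearing the low three address bits, i.e. by forcing the
   access down to the containing doubleword. */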
void glue(op_lwc1, MEMSUFFIX) (void)
{
    WT0 = glue(ldl, MEMSUFFIX)(T0);
    FORCE_RET();
}

void glue(op_swc1, MEMSUFFIX) (void)
{
    glue(stl, MEMSUFFIX)(T0, WT0);
    FORCE_RET();
}

void glue(op_ldc1, MEMSUFFIX) (void)
{
    DT0 = glue(ldq, MEMSUFFIX)(T0);
    FORCE_RET();
}

void glue(op_sdc1, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)(T0, DT0);
    FORCE_RET();
}

void glue(op_luxc1, MEMSUFFIX) (void)
{
    DT0 = glue(ldq, MEMSUFFIX)(T0 & ~0x7);
    FORCE_RET();
}

void glue(op_suxc1, MEMSUFFIX) (void)
{
    glue(stq, MEMSUFFIX)(T0 & ~0x7, DT0);
    FORCE_RET();
}