/* target/ppc/translate/vsx-impl.inc.c */
/*** VSX extension ***/

static inline void get_vsr(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, vsr[n].u64[1]));
}

static inline void set_vsr(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, vsr[n].u64[1]));
}

static inline int vsr_full_offset(int n)
{
    return offsetof(CPUPPCState, vsr[n].u64[0]);
}
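/*
 * A VSX register number runs 0-63: for VSRs 0-31 the high doubleword
 * aliases the corresponding FPR and the low doubleword is vsr[n].u64[1],
 * while VSRs 32-63 alias the Altivec registers.  The helpers below pick
 * the right backing store for each half.
 */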
static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
{
    if (n < 32) {
        get_fpr(dst, n);
    } else {
        get_avr64(dst, n - 32, true);
    }
}

static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
{
    if (n < 32) {
        get_vsr(dst, n);
    } else {
        get_avr64(dst, n - 32, false);
    }
}

static inline void set_cpu_vsrh(int n, TCGv_i64 src)
{
    if (n < 32) {
        set_fpr(n, src);
    } else {
        set_avr64(n - 32, src, true);
    }
}

static inline void set_cpu_vsrl(int n, TCGv_i64 src)
{
    if (n < 32) {
        set_vsr(n, src);
    } else {
        set_avr64(n - 32, src, false);
    }
}
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsrh(xT(ctx->opcode), t0);                        \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_mov_i64(t1, t0);
    set_cpu_vsrl(xT(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
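/*
 * lxvw4x loads four words in big-endian element order.  In LE mode the
 * two words inside each loaded doubleword arrive swapped, so each dword
 * is rotated by 32 bits using a shift plus deposit before being written
 * back.
 */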
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
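/*
 * Byte-swap each of the eight 16-bit lanes of the 128-bit value held in
 * inh:inl: mask out alternate bytes, shift them up, and OR in the other
 * bytes shifted down.
 */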
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}
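/*
 * Byte-swap the four 32-bit lanes of inh:inl.  bswap64 reverses all eight
 * bytes of a doubleword, so a 32-bit rotate (shift plus deposit) restores
 * the two words of each half to their original positions.
 */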
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
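/*
 * lxv/stxv and the indexed lxvx/stxvx address the full 64-entry VSX
 * register file: a target below 32 needs the VSX facility, one at 32 or
 * above lives in the Altivec set and needs the vector facility instead.
 */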
#define VSX_VECTOR_LOAD_STORE(name, op, indexed)            \
static void gen_##name(DisasContext *ctx)                   \
{                                                           \
    int xt;                                                 \
    TCGv EA;                                                \
    TCGv_i64 xth;                                           \
    TCGv_i64 xtl;                                           \
                                                            \
    if (indexed) {                                          \
        xt = xT(ctx->opcode);                               \
    } else {                                                \
        xt = DQxT(ctx->opcode);                             \
    }                                                       \
                                                            \
    if (xt < 32) {                                          \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
    } else {                                                \
        if (unlikely(!ctx->altivec_enabled)) {              \
            gen_exception(ctx, POWERPC_EXCP_VPU);           \
            return;                                         \
        }                                                   \
    }                                                       \
    xth = tcg_temp_new_i64();                               \
    xtl = tcg_temp_new_i64();                               \
    get_cpu_vsrh(xth, xt);                                  \
    get_cpu_vsrl(xtl, xt);                                  \
    gen_set_access_type(ctx, ACCESS_INT);                   \
    EA = tcg_temp_new();                                    \
    if (indexed) {                                          \
        gen_addr_reg_index(ctx, EA);                        \
    } else {                                                \
        gen_addr_imm_index(ctx, EA, 0x0F);                  \
    }                                                       \
    if (ctx->le_mode) {                                     \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);   \
        set_cpu_vsrl(xt, xtl);                              \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);   \
        set_cpu_vsrh(xt, xth);                              \
    } else {                                                \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);   \
        set_cpu_vsrh(xt, xth);                              \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);   \
        set_cpu_vsrl(xt, xtl);                              \
    }                                                       \
    tcg_temp_free(EA);                                      \
    tcg_temp_free_i64(xth);                                 \
    tcg_temp_free_i64(xtl);                                 \
}

VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)
#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA, xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = tcg_const_tl(xT(ctx->opcode));                            \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free(xt);                                             \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif
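/*
 * lxsd and lxssp are DS-form: the two low-order bits of the displacement
 * are opcode bits, which is why gen_addr_imm_index() is passed a mask of
 * 0x03.  Only the upper doubleword of the target VSR is defined.
 */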
#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                      \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);                      \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsrh(t0, xS(ctx->opcode));                        \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)
static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsrh(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsrl(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                      \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)
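/*
 * Moves between GPRs and VSRs: a VSR index below 32 is backed by the FPR
 * file and needs the floating-point facility, while 32 and up are backed
 * by the Altivec registers and need the vector facility.
 */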
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrl(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsrh(xT(ctx->opcode), t0);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

#endif
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
        }
        set_cpu_vsrh(xT(ctx->opcode), xh);
        set_cpu_vsrl(xT(ctx->opcode), xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}
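/*
 * The scalar and vector sign-manipulation instructions are implemented
 * with plain bit operations on the IEEE sign bit(s): abs clears the sign,
 * nabs sets it, neg flips it, and cpsgn copies it from xA.  SGN_MASK_SP
 * covers both 32-bit lanes of a doubleword.
 */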
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    TCGv_i64 xb, sgm;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xb = tcg_temp_new_i64();                                      \
    sgm = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xb, xB(ctx->opcode));                            \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS: {                                                \
        tcg_gen_andc_i64(xb, xb, sgm);                            \
        break;                                                    \
    }                                                             \
    case OP_NABS: {                                               \
        tcg_gen_or_i64(xb, xb, sgm);                              \
        break;                                                    \
    }                                                             \
    case OP_NEG: {                                                \
        tcg_gen_xor_i64(xb, xb, sgm);                             \
        break;                                                    \
    }                                                             \
    case OP_CPSGN: {                                              \
        TCGv_i64 xa = tcg_temp_new_i64();                         \
        get_cpu_vsrh(xa, xA(ctx->opcode));                        \
        tcg_gen_and_i64(xa, xa, sgm);                             \
        tcg_gen_andc_i64(xb, xb, sgm);                            \
        tcg_gen_or_i64(xb, xb, xa);                               \
        tcg_temp_free_i64(xa);                                    \
        break;                                                    \
    }                                                             \
    }                                                             \
    set_cpu_vsrh(xT(ctx->opcode), xb);                            \
    tcg_temp_free_i64(xb);                                        \
    tcg_temp_free_i64(sgm);                                       \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xbh, xb);                                        \
    get_cpu_vsrl(xbl, xb);                                        \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsrh(tmp, xa);                                    \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsrh(xt, xbh);                                        \
    set_cpu_vsrl(xt, xbl);                                        \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    TCGv_i64 xbh, xbl, sgm;                                       \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xbh, xB(ctx->opcode));                           \
    get_cpu_vsrl(xbl, xB(ctx->opcode));                           \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS: {                                                \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_andc_i64(xbl, xbl, sgm);                          \
        break;                                                    \
    }                                                             \
    case OP_NABS: {                                               \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        tcg_gen_or_i64(xbl, xbl, sgm);                            \
        break;                                                    \
    }                                                             \
    case OP_NEG: {                                                \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        tcg_gen_xor_i64(xbl, xbl, sgm);                           \
        break;                                                    \
    }                                                             \
    case OP_CPSGN: {                                              \
        TCGv_i64 xah = tcg_temp_new_i64();                        \
        TCGv_i64 xal = tcg_temp_new_i64();                        \
        get_cpu_vsrh(xah, xA(ctx->opcode));                       \
        get_cpu_vsrl(xal, xA(ctx->opcode));                       \
        tcg_gen_and_i64(xah, xah, sgm);                           \
        tcg_gen_and_i64(xal, xal, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_andc_i64(xbl, xbl, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_gen_or_i64(xbl, xbl, xal);                            \
        tcg_temp_free_i64(xah);                                   \
        tcg_temp_free_i64(xal);                                   \
        break;                                                    \
    }                                                             \
    }                                                             \
    set_cpu_vsrh(xT(ctx->opcode), xbh);                           \
    set_cpu_vsrl(xT(ctx->opcode), xbl);                           \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(sgm);                                       \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
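/*
 * Most VSX arithmetic is generated out of line: the raw opcode word is
 * passed to a C helper of the same name, which re-decodes the register
 * fields itself.
 */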
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)             \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    gen_helper_##name(cpu_env, opc);                              \
    tcg_temp_free_i32(opc);                                       \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type)     \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i64 t0;                                                  \
    TCGv_i64 t1;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    t0 = tcg_temp_new_i64();                                      \
    t1 = tcg_temp_new_i64();                                      \
    get_cpu_vsrh(t0, xB(ctx->opcode));                            \
    gen_helper_##name(t1, cpu_env, t0);                           \
    set_cpu_vsrh(xT(ctx->opcode), t1);                            \
    tcg_temp_free_i64(t0);                                        \
    tcg_temp_free_i64(t1);                                        \
}
GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsminjdp, 0x00, 0x13, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvqpdp, 0x04, 0x1A, 0x14, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
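/*
 * The 128-bit logical operations map directly onto TCG's generic vector
 * ops; an oprsz/maxsz of 16 bytes covers the whole VSR.
 */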
#define VSX_LOGICAL(name, vece, tcg_op)                           \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),                \
           vsr_full_offset(xA(ctx->opcode)),                      \
           vsr_full_offset(xB(ctx->opcode)), 16, 16);             \
}

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
#define VSX_XXMRG(name, high)                                     \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    TCGv_i64 a0, a1, b0, b1, tmp;                                 \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    a0 = tcg_temp_new_i64();                                      \
    a1 = tcg_temp_new_i64();                                      \
    b0 = tcg_temp_new_i64();                                      \
    b1 = tcg_temp_new_i64();                                      \
    tmp = tcg_temp_new_i64();                                     \
    if (high) {                                                   \
        get_cpu_vsrh(a0, xA(ctx->opcode));                        \
        get_cpu_vsrh(a1, xA(ctx->opcode));                        \
        get_cpu_vsrh(b0, xB(ctx->opcode));                        \
        get_cpu_vsrh(b1, xB(ctx->opcode));                        \
    } else {                                                      \
        get_cpu_vsrl(a0, xA(ctx->opcode));                        \
        get_cpu_vsrl(a1, xA(ctx->opcode));                        \
        get_cpu_vsrl(b0, xB(ctx->opcode));                        \
        get_cpu_vsrl(b1, xB(ctx->opcode));                        \
    }                                                             \
                                                                  \
    tcg_gen_shri_i64(a0, a0, 32);                                 \
    tcg_gen_shri_i64(b0, b0, 32);                                 \
    tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);                     \
    set_cpu_vsrh(xT(ctx->opcode), tmp);                           \
    tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);                     \
    set_cpu_vsrl(xT(ctx->opcode), tmp);                           \
    tcg_temp_free_i64(a0);                                        \
    tcg_temp_free_i64(a1);                                        \
    tcg_temp_free_i64(b0);                                        \
    tcg_temp_free_i64(b1);                                        \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)
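/*
 * xxsel is a bitwise select, t = (b & c) | (a & ~c), expanded either per
 * 64-bit half or as host vector ops via the GVecGen4 table below.
 */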
static void xxsel_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b, TCGv_i64 c)
{
    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(t, a, b);
}

static void xxsel_vec(unsigned vece, TCGv_vec t, TCGv_vec a,
                      TCGv_vec b, TCGv_vec c)
{
    tcg_gen_and_vec(vece, b, b, c);
    tcg_gen_andc_vec(vece, a, a, c);
    tcg_gen_or_vec(vece, t, a, b);
}

static void gen_xxsel(DisasContext *ctx)
{
    static const GVecGen4 g = {
        .fni8 = xxsel_i64,
        .fniv = xxsel_vec,
        .vece = MO_64,
    };
    int rt = xT(ctx->opcode);
    int ra = xA(ctx->opcode);
    int rb = xB(ctx->opcode);
    int rc = xC(ctx->opcode);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_gvec_4(vsr_full_offset(rt), vsr_full_offset(ra),
                   vsr_full_offset(rb), vsr_full_offset(rc), 16, 16, &g);
}
static void gen_xxspltw(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int rb = xB(ctx->opcode);
    int uim = UIM(ctx->opcode);
    int tofs, bofs;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    tofs = vsr_full_offset(rt);
    bofs = vsr_full_offset(rb);
    bofs += uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
}
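/* Replicate the low byte of x into all eight bytes of a 64-bit value. */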
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    uint8_t uim8 = IMM8(ctx->opcode);
    int rt = xT(ctx->opcode);

    if (rt < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_gvec_dup8i(vsr_full_offset(rt), 16, 16, uim8);
}
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        get_cpu_vsrh(xth, xA(ctx->opcode));
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrh(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrl(t0, xA(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        get_cpu_vsrl(xth, xA(ctx->opcode));
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrl(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrl(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
#define VSX_EXTRACT_INSERT(name)                                  \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv xt, xb;                                                  \
    TCGv_i32 t0;                                                  \
    TCGv_i64 t1;                                                  \
    uint8_t uimm = UIMM4(ctx->opcode);                            \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xt = tcg_const_tl(xT(ctx->opcode));                           \
    xb = tcg_const_tl(xB(ctx->opcode));                           \
    t0 = tcg_temp_new_i32();                                      \
    t1 = tcg_temp_new_i64();                                      \
    /*                                                            \
     * uimm > 15 is out of bounds; uimm > 12 is handled as per    \
     * hardware in the helper.                                    \
     */                                                           \
    if (uimm > 15) {                                              \
        tcg_gen_movi_i64(t1, 0);                                  \
        set_cpu_vsrh(xT(ctx->opcode), t1);                        \
        set_cpu_vsrl(xT(ctx->opcode), t1);                        \
        tcg_temp_free(xb);                                        \
        tcg_temp_free(xt);                                        \
        tcg_temp_free_i32(t0);                                    \
        tcg_temp_free_i64(t1);                                    \
        return;                                                   \
    }                                                             \
    tcg_gen_movi_i32(t0, uimm);                                   \
    gen_helper_##name(cpu_env, xt, xb, t0);                       \
    tcg_temp_free(xb);                                            \
    tcg_temp_free(xt);                                            \
    tcg_temp_free_i32(t0);                                        \
    tcg_temp_free_i64(t1);                                        \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)
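/*
 * The exponent/significand insert and extract instructions operate on the
 * raw IEEE-754 encoding: double precision keeps an 11-bit exponent at bit
 * 52, quad precision a 15-bit exponent at bit 48 of the most-significant
 * doubleword.
 */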
#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xB(ctx->opcode));
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
    get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}
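/*
 * xsxsigdp extracts the 52-bit significand and ORs in the implicit
 * leading 1 at bit 52 unless the exponent marks a zero/denormal (0) or
 * an Inf/NaN (2047); the movcond pairs below zero the implicit bit in
 * those cases.
 */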
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_andi_i64(rt, t1, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(rt, rt, t0);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xth, xbh, 0x0000FFFFFFFFFFFF);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif
static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbl, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)
static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xth, xbh, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xtl, xbl, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL