target/ppc: convert VSX logical operations to vector operations
[qemu/ar7.git] / target/ppc/translate/vsx-impl.inc.c
/*** VSX extension ***/

static inline void get_vsr(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, offsetof(CPUPPCState, vsr[n].u64[1]));
}

static inline void set_vsr(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, offsetof(CPUPPCState, vsr[n].u64[1]));
}

static inline int vsr_full_offset(int n)
{
    return offsetof(CPUPPCState, vsr[n].u64[0]);
}
static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
{
    if (n < 32) {
        get_fpr(dst, n);
    } else {
        get_avr64(dst, n - 32, true);
    }
}

static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
{
    if (n < 32) {
        get_vsr(dst, n);
    } else {
        get_avr64(dst, n - 32, false);
    }
}

static inline void set_cpu_vsrh(int n, TCGv_i64 src)
{
    if (n < 32) {
        set_fpr(n, src);
    } else {
        set_avr64(n - 32, src, true);
    }
}

static inline void set_cpu_vsrl(int n, TCGv_i64 src)
{
    if (n < 32) {
        set_vsr(n, src);
    } else {
        set_avr64(n - 32, src, false);
    }
}
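
/*
 * Layout note (a sketch of the assumptions behind the helpers above):
 * the FP, VMX and VSX register files live in the single aligned vsr[64]
 * array in CPUPPCState.  VSRs 0-31 share their high doubleword
 * (vsr[n].u64[0]) with the FPRs and keep their low doubleword in
 * vsr[n].u64[1]; VSRs 32-63 are the Altivec VRs, reached through
 * get_avr64()/set_avr64() with a high/low flag.  vsr_full_offset()
 * yields the offset of the whole 16-byte register, which is what the
 * tcg_gen_gvec_* calls near the end of this file consume.
 */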
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsrh(xT(ctx->opcode), t0);                        \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)
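
/*
 * For reference, each VSX_LOAD_SCALAR instantiation differs only in the
 * gen_qemu_* memory op used to fill the high doubleword of VSR[XT];
 * e.g. VSX_LOAD_SCALAR(lxsdx, ld64_i64) expands to a gen_lxsdx() whose
 * body ends in (sketch, checks and temp management elided):
 *
 *     gen_qemu_ld64_i64(ctx, t0, EA);
 *     set_cpu_vsrh(xT(ctx->opcode), t0);
 */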
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_mov_i64(t1, t0);
    set_cpu_vsrl(xT(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    get_cpu_vsrh(xth, xT(ctx->opcode));
    get_cpu_vsrl(xtl, xT(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}
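
/*
 * gen_bswap16x8 byte-swaps eight 16-bit lanes at once with the classic
 * mask-and-shift trick rather than eight extract/insert pairs.  A
 * host-side equivalent of what the emitted ops compute for one
 * doubleword (illustrative only, not used by this file):
 *
 *     static inline uint64_t bswap16x4(uint64_t x)
 *     {
 *         const uint64_t m = 0x00ff00ff00ff00ffull;
 *         return ((x & m) << 8) | ((x >> 8) & m);
 *     }
 */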
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}
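
/*
 * For 32-bit lanes the code reuses a full 64-bit bswap: reversing all
 * eight bytes also swaps the two words, so the shri/deposit pair above
 * swaps the word halves back, leaving each word individually
 * byte-reversed.  Host-side sketch of the same computation:
 *
 *     static inline uint64_t bswap32x2(uint64_t x)
 *     {
 *         uint64_t r = __builtin_bswap64(x);
 *         return (r >> 32) | (r << 32);   // undo the word swap
 *     }
 */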
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    get_cpu_vsrh(xth, xT(ctx->opcode));
    get_cpu_vsrl(xtl, xT(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    get_cpu_vsrh(xth, xT(ctx->opcode));
    get_cpu_vsrl(xtl, xT(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
#define VSX_VECTOR_LOAD_STORE(name, op, indexed)            \
static void gen_##name(DisasContext *ctx)                   \
{                                                           \
    int xt;                                                 \
    TCGv EA;                                                \
    TCGv_i64 xth;                                           \
    TCGv_i64 xtl;                                           \
                                                            \
    if (indexed) {                                          \
        xt = xT(ctx->opcode);                               \
    } else {                                                \
        xt = DQxT(ctx->opcode);                             \
    }                                                       \
                                                            \
    if (xt < 32) {                                          \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
    } else {                                                \
        if (unlikely(!ctx->altivec_enabled)) {              \
            gen_exception(ctx, POWERPC_EXCP_VPU);           \
            return;                                         \
        }                                                   \
    }                                                       \
    xth = tcg_temp_new_i64();                               \
    xtl = tcg_temp_new_i64();                               \
    get_cpu_vsrh(xth, xt);                                  \
    get_cpu_vsrl(xtl, xt);                                  \
    gen_set_access_type(ctx, ACCESS_INT);                   \
    EA = tcg_temp_new();                                    \
    if (indexed) {                                          \
        gen_addr_reg_index(ctx, EA);                        \
    } else {                                                \
        gen_addr_imm_index(ctx, EA, 0x0F);                  \
    }                                                       \
    if (ctx->le_mode) {                                     \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);   \
        set_cpu_vsrl(xt, xtl);                              \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);   \
        set_cpu_vsrh(xt, xth);                              \
    } else {                                                \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);   \
        set_cpu_vsrh(xt, xth);                              \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);   \
        set_cpu_vsrl(xt, xtl);                              \
    }                                                       \
    tcg_temp_free(EA);                                      \
    tcg_temp_free_i64(xth);                                 \
    tcg_temp_free_i64(xtl);                                 \
}

VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)
#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA, xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = tcg_const_tl(xT(ctx->opcode));                            \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free(xt);                                             \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif
#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                      \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);                      \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsrh(t0, xS(ctx->opcode));                        \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)
static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsrh(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsrl(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);                      \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}
static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}
static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}
#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}
static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrl(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}
static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsrh(xT(ctx->opcode), t0);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}
static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}
#endif
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
        }
        set_cpu_vsrh(xT(ctx->opcode), xh);
        set_cpu_vsrl(xT(ctx->opcode), xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull
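
/*
 * The sign masks select the IEEE sign bits within one 64-bit half of a
 * VSR: SGN_MASK_DP covers the single double-precision sign bit, while
 * SGN_MASK_SP covers the sign bit of each of the two packed
 * single-precision lanes.  A minimal host-side sketch of the idea
 * (hypothetical helper, not part of this file):
 *
 *     uint64_t sp_abs(uint64_t two_floats)
 *     {
 *         return two_floats & ~0x8000000080000000ull;  // clear both signs
 *     }
 */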
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    TCGv_i64 xb, sgm;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xb = tcg_temp_new_i64();                                      \
    sgm = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xb, xB(ctx->opcode));                            \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS: {                                                \
        tcg_gen_andc_i64(xb, xb, sgm);                            \
        break;                                                    \
    }                                                             \
    case OP_NABS: {                                               \
        tcg_gen_or_i64(xb, xb, sgm);                              \
        break;                                                    \
    }                                                             \
    case OP_NEG: {                                                \
        tcg_gen_xor_i64(xb, xb, sgm);                             \
        break;                                                    \
    }                                                             \
    case OP_CPSGN: {                                              \
        TCGv_i64 xa = tcg_temp_new_i64();                         \
        get_cpu_vsrh(xa, xA(ctx->opcode));                        \
        tcg_gen_and_i64(xa, xa, sgm);                             \
        tcg_gen_andc_i64(xb, xb, sgm);                            \
        tcg_gen_or_i64(xb, xb, xa);                               \
        tcg_temp_free_i64(xa);                                    \
        break;                                                    \
    }                                                             \
    }                                                             \
    set_cpu_vsrh(xT(ctx->opcode), xb);                            \
    tcg_temp_free_i64(xb);                                        \
    tcg_temp_free_i64(sgm);                                       \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
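
/*
 * The four cases above are plain sign-bit algebra: with sgm holding the
 * sign mask, abs clears the bit (x & ~sgm), nabs forces it (x | sgm),
 * neg flips it (x ^ sgm), and cpsgn splices A's sign onto B's magnitude
 * ((a & sgm) | (b & ~sgm)).  Worked example for double precision:
 * -2.0 is 0xC000000000000000, so
 *
 *     xsabsdp: 0xC000000000000000 & ~SGN_MASK_DP = 0x4000000000000000 (2.0)
 *     xsnegdp: 0x4000000000000000 ^  SGN_MASK_DP = 0xC000000000000000 (-2.0)
 */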
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xbh, xb);                                        \
    get_cpu_vsrl(xbl, xb);                                        \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsrh(tmp, xa);                                    \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsrh(xt, xbh);                                        \
    set_cpu_vsrl(xt, xbl);                                        \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    TCGv_i64 xbh, xbl, sgm;                                       \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    get_cpu_vsrh(xbh, xB(ctx->opcode));                           \
    get_cpu_vsrl(xbl, xB(ctx->opcode));                           \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS: {                                                \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_andc_i64(xbl, xbl, sgm);                          \
        break;                                                    \
    }                                                             \
    case OP_NABS: {                                               \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        tcg_gen_or_i64(xbl, xbl, sgm);                            \
        break;                                                    \
    }                                                             \
    case OP_NEG: {                                                \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        tcg_gen_xor_i64(xbl, xbl, sgm);                           \
        break;                                                    \
    }                                                             \
    case OP_CPSGN: {                                              \
        TCGv_i64 xah = tcg_temp_new_i64();                        \
        TCGv_i64 xal = tcg_temp_new_i64();                        \
        get_cpu_vsrh(xah, xA(ctx->opcode));                       \
        get_cpu_vsrl(xal, xA(ctx->opcode));                       \
        tcg_gen_and_i64(xah, xah, sgm);                           \
        tcg_gen_and_i64(xal, xal, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_andc_i64(xbl, xbl, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_gen_or_i64(xbl, xbl, xal);                            \
        tcg_temp_free_i64(xah);                                   \
        tcg_temp_free_i64(xal);                                   \
        break;                                                    \
    }                                                             \
    }                                                             \
    set_cpu_vsrh(xT(ctx->opcode), xbh);                           \
    set_cpu_vsrl(xT(ctx->opcode), xbl);                           \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(sgm);                                       \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)             \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i32 opc;                                                 \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    opc = tcg_const_i32(ctx->opcode);                             \
    gen_helper_##name(cpu_env, opc);                              \
    tcg_temp_free_i32(opc);                                       \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type)     \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv_i64 t0;                                                  \
    TCGv_i64 t1;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    t0 = tcg_temp_new_i64();                                      \
    t1 = tcg_temp_new_i64();                                      \
    get_cpu_vsrh(t0, xB(ctx->opcode));                            \
    gen_helper_##name(t1, cpu_env, t0);                           \
    set_cpu_vsrh(xT(ctx->opcode), t1);                            \
    tcg_temp_free_i64(t0);                                        \
    tcg_temp_free_i64(t1);                                        \
}
GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsminjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvqpdp, 0x04, 0x1A, 0x14, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#define VSX_LOGICAL(name, vece, tcg_op)                           \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),                \
           vsr_full_offset(xA(ctx->opcode)),                      \
           vsr_full_offset(xB(ctx->opcode)), 16, 16);             \
}

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
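
/*
 * These eight instructions are the ones this patch converts to TCG
 * vector (gvec) operations: instead of two 64-bit ops per register
 * half, each tcg_gen_gvec_* call operates on the full 16-byte VSR in
 * place, given the CPUPPCState offsets of xT, xA and xB.  The trailing
 * (16, 16) pair is oprsz/maxsz, i.e. operate on and clear up to 16
 * bytes.  For example, VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
 * expands (facility check elided) to:
 *
 *     tcg_gen_gvec_and(MO_64, vsr_full_offset(xT(ctx->opcode)),
 *                      vsr_full_offset(xA(ctx->opcode)),
 *                      vsr_full_offset(xB(ctx->opcode)), 16, 16);
 */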
#define VSX_XXMRG(name, high)                                     \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    TCGv_i64 a0, a1, b0, b1, tmp;                                 \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    a0 = tcg_temp_new_i64();                                      \
    a1 = tcg_temp_new_i64();                                      \
    b0 = tcg_temp_new_i64();                                      \
    b1 = tcg_temp_new_i64();                                      \
    tmp = tcg_temp_new_i64();                                     \
    if (high) {                                                   \
        get_cpu_vsrh(a0, xA(ctx->opcode));                        \
        get_cpu_vsrh(a1, xA(ctx->opcode));                        \
        get_cpu_vsrh(b0, xB(ctx->opcode));                        \
        get_cpu_vsrh(b1, xB(ctx->opcode));                        \
    } else {                                                      \
        get_cpu_vsrl(a0, xA(ctx->opcode));                        \
        get_cpu_vsrl(a1, xA(ctx->opcode));                        \
        get_cpu_vsrl(b0, xB(ctx->opcode));                        \
        get_cpu_vsrl(b1, xB(ctx->opcode));                        \
    }                                                             \
    tcg_gen_shri_i64(a0, a0, 32);                                 \
    tcg_gen_shri_i64(b0, b0, 32);                                 \
    tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);                     \
    set_cpu_vsrh(xT(ctx->opcode), tmp);                           \
    tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);                     \
    set_cpu_vsrl(xT(ctx->opcode), tmp);                           \
    tcg_temp_free_i64(a0);                                        \
    tcg_temp_free_i64(a1);                                        \
    tcg_temp_free_i64(b0);                                        \
    tcg_temp_free_i64(b1);                                        \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)
static void gen_xxsel(DisasContext *ctx)
{
    TCGv_i64 a, b, c, tmp;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();
    tmp = tcg_temp_new_i64();

    get_cpu_vsrh(a, xA(ctx->opcode));
    get_cpu_vsrh(b, xB(ctx->opcode));
    get_cpu_vsrh(c, xC(ctx->opcode));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(tmp, a, b);
    set_cpu_vsrh(xT(ctx->opcode), tmp);

    get_cpu_vsrl(a, xA(ctx->opcode));
    get_cpu_vsrl(b, xB(ctx->opcode));
    get_cpu_vsrl(c, xC(ctx->opcode));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(tmp, a, b);
    set_cpu_vsrl(xT(ctx->opcode), tmp);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
    tcg_temp_free_i64(tmp);
}
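
/*
 * xxsel computes the usual bitwise select: each result bit comes from B
 * where the mask C is 1 and from A where it is 0, i.e.
 * T = (B & C) | (A & ~C), applied independently to both register halves.
 */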
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    vsr = tcg_temp_new_i64();
    if (UIM(ctx->opcode) & 2) {
        get_cpu_vsrl(vsr, xB(ctx->opcode));
    } else {
        get_cpu_vsrh(vsr, xB(ctx->opcode));
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(vsr, b, b2);
    set_cpu_vsrh(xT(ctx->opcode), vsr);
    set_cpu_vsrl(xT(ctx->opcode), vsr);

    tcg_temp_free_i64(vsr);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
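
/*
 * pattern() replicates a byte into all eight byte lanes of a uint64_t:
 * ~(uint64_t)0 / 0xff is 0x0101010101010101, so pattern(0xab) evaluates
 * to 0xabababababababab.  gen_xxspltib below uses this to splat the
 * immediate into both halves of the target VSR.
 */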
static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    TCGv_i64 vsr;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    vsr = tcg_temp_new_i64();
    tcg_gen_movi_i64(vsr, pattern(uim8));
    set_cpu_vsrh(xT(ctx->opcode), vsr);
    set_cpu_vsrl(xT(ctx->opcode), vsr);
    tcg_temp_free_i64(vsr);
}
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        get_cpu_vsrh(xth, xA(ctx->opcode));
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrh(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrl(t0, xA(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        get_cpu_vsrl(xth, xA(ctx->opcode));
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrl(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrl(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
#define VSX_EXTRACT_INSERT(name)                                  \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv xt, xb;                                                  \
    TCGv_i32 t0;                                                  \
    TCGv_i64 t1;                                                  \
    uint8_t uimm = UIMM4(ctx->opcode);                            \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xt = tcg_const_tl(xT(ctx->opcode));                           \
    xb = tcg_const_tl(xB(ctx->opcode));                           \
    t0 = tcg_temp_new_i32();                                      \
    t1 = tcg_temp_new_i64();                                      \
    /*                                                            \
     * uimm > 15 is out of bound; uimm > 12 is handled as per     \
     * hardware in the helper.                                    \
     */                                                           \
    if (uimm > 15) {                                              \
        tcg_gen_movi_i64(t1, 0);                                  \
        set_cpu_vsrh(xT(ctx->opcode), t1);                        \
        set_cpu_vsrl(xT(ctx->opcode), t1);                        \
        tcg_temp_free(xb);                                        \
        tcg_temp_free(xt);                                        \
        tcg_temp_free_i32(t0);                                    \
        tcg_temp_free_i64(t1);                                    \
        return;                                                   \
    }                                                             \
    tcg_gen_movi_i32(t0, uimm);                                   \
    gen_helper_##name(cpu_env, xt, xb, t0);                       \
    tcg_temp_free(xb);                                            \
    tcg_temp_free(xt);                                            \
    tcg_temp_free_i32(t0);                                        \
    tcg_temp_free_i64(t1);                                        \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)
#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xB(ctx->opcode));
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}
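
/*
 * tcg_gen_extract_i64(rt, t0, 52, 11) pulls bits 52..62 out of the raw
 * double, which is exactly the 11-bit biased exponent of an IEEE754
 * binary64 value (bit 63 is the sign, bits 0..51 the fraction).  The
 * quad-precision variant below extracts the 15-bit exponent at bit 48
 * of the high doubleword instead.
 */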
static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}
static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
    get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_andi_i64(rt, t1, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(rt, rt, t0);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}
static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xth, xbh, 0x0000FFFFFFFFFFFF);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif
static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbl, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)
static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xth, xbh, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xtl, xbl, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL