target/ppc: Introduce TRANS*FLAGS macros
target/ppc/translate/vsx-impl.c.inc
/***                           VSX extension                               ***/

static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high));
}

static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}
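
/*
 * A 128-bit VSR is stored in CPUPPCState as two 64-bit doublewords;
 * get/set_cpu_vsr access one half (high = most significant doubleword)
 * and gen_vsr_ptr builds a pointer to the full register for
 * out-of-line helpers.
 */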

#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsr(xT(ctx->opcode), t0, true);                   \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)
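
/*
 * Each instantiation above expands to a gen_* handler that loads one
 * scalar (doubleword, sign- or zero-extended word, halfword, byte, or
 * converted single-precision float) into the upper doubleword of
 * VSR[XT]; the lower doubleword is left undefined, as noted in the
 * macro.
 */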

static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
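    /*
     * A MO_LEUQ load reverses all eight bytes, but lxvw4x needs bytes
     * swapped only within each word: the shri/deposit pair below swaps
     * the two 32-bit halves back into memory order.
     */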
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvwsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i32 data;
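
    /*
     * VSRs 0-31 alias the FPRs and need VSX enabled; VSRs 32-63 alias
     * the AltiVec registers and need VMX, hence the split check below.
     */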
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i32(data);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 data;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UQ));
    tcg_gen_gvec_dup_i64(MO_UQ, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i64(data);
}

static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}
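
/*
 * Byte-swap each 32-bit word of a 128-bit value: bswap64 reverses all
 * eight bytes of each half, then the shift/deposit pair swaps the two
 * words back into their original order.
 */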
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}
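
/*
 * Load/store vector with length: the byte count is taken from the
 * high-order byte of RB, which is why RB itself is passed to the
 * helper along with the effective address.
 */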
VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);                 \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsr(t0, xS(ctx->opcode), true);                   \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEUQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xth, rD(ctx->opcode) + 32, true);                 \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsr(xT(ctx->opcode), t0, true);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free_i64(t0);
}
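
/* mtvsrws splats the low word of RA into all four words of XT. */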
static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

#endif
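
/*
 * xxpermdi: the high DM bit selects which doubleword of xA feeds xT's
 * upper half, the low bit which doubleword of xB feeds the lower half;
 * the in-place case (xT aliasing a source) reads both before writing.
 */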
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        get_cpu_vsr(xh, xA(ctx->opcode), (DM(ctx->opcode) & 2) == 0);
        get_cpu_vsr(xl, xB(ctx->opcode), (DM(ctx->opcode) & 1) == 0);

        set_cpu_vsr(xT(ctx->opcode), xh, true);
        set_cpu_vsr(xT(ctx->opcode), xl, false);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsr(xh, xA(ctx->opcode), true);
            set_cpu_vsr(xT(ctx->opcode), xh, true);
        } else {
            get_cpu_vsr(xh, xA(ctx->opcode), false);
            set_cpu_vsr(xT(ctx->opcode), xh, true);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsr(xl, xB(ctx->opcode), true);
            set_cpu_vsr(xT(ctx->opcode), xl, false);
        } else {
            get_cpu_vsr(xl, xB(ctx->opcode), false);
            set_cpu_vsr(xT(ctx->opcode), xl, false);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull
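
/*
 * The move macros below implement abs/nabs/neg/cpsgn purely with
 * sign-bit masks: ABS clears the sign bit (andc), NABS sets it (or),
 * NEG flips it (xor), and CPSGN copies xA's sign onto xB's magnitude.
 */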
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xb, xB(ctx->opcode), true);                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                get_cpu_vsr(xa, xA(ctx->opcode), true);           \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xb, true);                   \
        set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xbh, xb, true);                                   \
    get_cpu_vsr(xbl, xb, false);                                  \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsr(tmp, xa, true);                               \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsr(xt, xbh, true);                                   \
    set_cpu_vsr(xt, xbl, false);                                  \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext *ctx)                  \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        get_cpu_vsr(xbh, xB(ctx->opcode), true);                 \
        get_cpu_vsr(xbl, xB(ctx->opcode), false);                \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                get_cpu_vsr(xah, xA(ctx->opcode), true);         \
                get_cpu_vsr(xal, xA(ctx->opcode), false);        \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        set_cpu_vsr(xT(ctx->opcode), xbh, true);                 \
        set_cpu_vsr(xT(ctx->opcode), xbl, false);                \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
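
/*
 * Vector FP compares: when the record bit (opcode bit 21) is set, the
 * helper's summary result (all lanes true / all lanes false) is also
 * written to CR field 6; otherwise it is computed and discarded.
 */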
#define VSX_CMP(name, op1, op2, inval, type)                                  \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 ignored;                                                         \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);                   \
    } else {                                                                  \
        ignored = tcg_temp_new_i32();                                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);                      \
        tcg_temp_free_i32(ignored);                                           \
    }                                                                         \
    gen_helper_float_check_status(cpu_env);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
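
/*
 * trans_XSCVQPDP below is a new-style decodetree handler: availability
 * is checked with the REQUIRE_INSNS_FLAGS2/REQUIRE_VSX macros rather
 * than the hand-rolled vsx_enabled tests in the legacy generators above.
 */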
static bool trans_XSCVQPDP(DisasContext *ctx, arg_X_tb_rc *a)
{
    TCGv_i32 ro;
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    ro = tcg_const_i32(a->rc);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper_XSCVQPDP(cpu_env, ro, xt, xb);
    tcg_temp_free_i32(ro);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}
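
/*
 * Legacy helper generators: the X* forms pass full-VSR pointers
 * selected by the XT/XA/XB opcode fields (plus the raw opcode where the
 * helper needs it), while the R* forms add 32 to rD/rA/rB to reach
 * VSRs 32-63, where the quad-precision operands live.
 */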
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xb);                                       \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xb;                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xb);                                      \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                              \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsr(t0, xB(ctx->opcode), true);                   \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsr(xT(ctx->opcode), t1, true);                   \
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}
1082 GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
1083 GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
1084 GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
1085 GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
1086 GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
1087 GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
1088 GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
1089 GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
1090 GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
1091 GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
1092 GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
1093 GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
1094 GEN_VSX_HELPER_X3(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
1095 GEN_VSX_HELPER_X3(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
1096 GEN_VSX_HELPER_X3(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
1097 GEN_VSX_HELPER_X3(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
1098 GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
1099 GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
1100 GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
1101 GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
1102 GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
1103 GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
1104 GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
1105 GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
1106 GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
1107 GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
1108 GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
1109 GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
1110 GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
1111 GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
1112 GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
1113 GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
1114 GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
1115 GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
1116 GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
1117 GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
1118 GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
1119 GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
1120 GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
1121 GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
1122 GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
1123 GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
1124 GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
1125 GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
1126 GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
1127 GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
1128 GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
1129 GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
1130 GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
1131 GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
1132 GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
1133 GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
1134 GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
1135 GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
1136 GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
1137 GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
1138 GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)
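
/*
 * VSX fused multiply-add. Opcode bit 25 selects the operand ordering:
 * with it set, xT enters the product (AxT + B, the "m"-suffixed
 * variants); otherwise xT is the addend (AxB + T, the "a"-suffixed
 * variants). The b and c pointers are bound accordingly below.
 */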
#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)             \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, b, c;                                                    \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    if (ctx->opcode & PPC_BIT32(25)) {                                        \
        /*                                                                    \
         * AxT + B                                                            \
         */                                                                   \
        b = gen_vsr_ptr(xT(ctx->opcode));                                     \
        c = gen_vsr_ptr(xB(ctx->opcode));                                     \
    } else {                                                                  \
        /*                                                                    \
         * AxB + T                                                            \
         */                                                                   \
        b = gen_vsr_ptr(xB(ctx->opcode));                                     \
        c = gen_vsr_ptr(xT(ctx->opcode));                                     \
    }                                                                         \
    gen_helper_##name(cpu_env, xt, xa, b, c);                                 \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_ptr(c);                                                     \
}

GEN_VSX_HELPER_VSX_MADD(xsmadddp, 0x04, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmsubdp, 0x04, 0x06, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmadddp, 0x04, 0x14, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmsubdp, 0x04, 0x16, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmaddsp, 0x04, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsmsubsp, 0x04, 0x02, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmaddsp, 0x04, 0x10, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmsubsp, 0x04, 0x12, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)
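
/* xxbrd: VSX Vector Byte-Reverse Doubleword */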
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
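
/* xxbrh: VSX Vector Byte-Reverse Halfword */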
static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
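
/* xxbrq: VSX Vector Byte-Reverse Quadword */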
static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
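
/* xxbrw: VSX Vector Byte-Reverse Word */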
static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
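
/*
 * 128-bit VSX logical operations, expanded inline with the generic
 * vector (gvec) API instead of calling out to a helper.
 */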
#define VSX_LOGICAL(name, vece, tcg_op)                              \
static void glue(gen_, name)(DisasContext *ctx)                      \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),               \
               vsr_full_offset(xA(ctx->opcode)),                     \
               vsr_full_offset(xB(ctx->opcode)), 16, 16);            \
    }

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
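
/*
 * xxmrghw/xxmrglw: interleave alternating words of XA and XB, taken
 * from the high (xxmrghw) or low (xxmrglw) doubleword of each source.
 */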
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext *ctx)             \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1, tmp;                       \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        tmp = tcg_temp_new_i64();                           \
        get_cpu_vsr(a0, xA(ctx->opcode), high);             \
        get_cpu_vsr(a1, xA(ctx->opcode), high);             \
        get_cpu_vsr(b0, xB(ctx->opcode), high);             \
        get_cpu_vsr(b1, xB(ctx->opcode), high);             \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);           \
        set_cpu_vsr(xT(ctx->opcode), tmp, true);            \
        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);           \
        set_cpu_vsr(xT(ctx->opcode), tmp, false);           \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
        tcg_temp_free_i64(tmp);                             \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)
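
/* xxsel: VSX Select, XT = (XA & ~XC) | (XB & XC), via the gvec bitsel op. */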
static void gen_xxsel(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int ra = xA(ctx->opcode);
    int rb = xB(ctx->opcode);
    int rc = xC(ctx->opcode);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(rt), vsr_full_offset(rc),
                        vsr_full_offset(rb), vsr_full_offset(ra), 16, 16);
}
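
/*
 * XXSPLTW: replicate word element UIM of XB across XT. On little-endian
 * hosts the byte offset of the source word must be flipped within the
 * 16-byte vector, since the in-memory element order is reversed.
 */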
static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2 *a)
{
    int tofs, bofs;

    REQUIRE_VSX(ctx);

    tofs = vsr_full_offset(a->xt);
    bofs = vsr_full_offset(a->xb);
    bofs += a->uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
    return true;
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a)
{
    if (a->xt < 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }
    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm);
    return true;
}

static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si);

    return true;
}

static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16,
                         helper_todouble(a->si));
    return true;
}

static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a)
{
    TCGv_i32 imm;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    imm = tcg_constant_i32(a->si);

    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix)));

    return true;
}
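
/*
 * LXVKQ: load one of a fixed set of 128-bit (quad-precision) constants,
 * selected by the UIM field; encodings not in the table are invalid.
 */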
static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a)
{
    static const uint64_t values[32] = {
        0, /* Unspecified */
        0x3FFF000000000000llu, /* QP +1.0 */
        0x4000000000000000llu, /* QP +2.0 */
        0x4000800000000000llu, /* QP +3.0 */
        0x4001000000000000llu, /* QP +4.0 */
        0x4001400000000000llu, /* QP +5.0 */
        0x4001800000000000llu, /* QP +6.0 */
        0x4001C00000000000llu, /* QP +7.0 */
        0x7FFF000000000000llu, /* QP +Inf */
        0x7FFF800000000000llu, /* QP dQNaN */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0x8000000000000000llu, /* QP -0.0 */
        0xBFFF000000000000llu, /* QP -1.0 */
        0xC000000000000000llu, /* QP -2.0 */
        0xC000800000000000llu, /* QP -3.0 */
        0xC001000000000000llu, /* QP -4.0 */
        0xC001400000000000llu, /* QP -5.0 */
        0xC001800000000000llu, /* QP -6.0 */
        0xC001C00000000000llu, /* QP -7.0 */
        0xFFFF000000000000llu, /* QP -Inf */
    };

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (values[a->uim]) {
        set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
        set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true);
    } else {
        gen_invalid(ctx);
    }

    return true;
}
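
/*
 * xxsldwi: shift the 32-byte concatenation XA:XB left by SHW words and
 * keep the most significant 16 bytes as XT.
 */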
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            get_cpu_vsr(xth, xA(ctx->opcode), true);
            get_cpu_vsr(xtl, xA(ctx->opcode), false);
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsr(xth, xA(ctx->opcode), true);
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsr(t0, xA(ctx->opcode), false);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsr(xtl, xA(ctx->opcode), false);
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), true);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            get_cpu_vsr(xth, xA(ctx->opcode), false);
            get_cpu_vsr(xtl, xB(ctx->opcode), true);
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsr(xth, xA(ctx->opcode), false);
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), true);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsr(xtl, xB(ctx->opcode), true);
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), false);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
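
/* xxextractuw/xxinsertw: extract or insert a word at byte position UIM. */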
#define VSX_EXTRACT_INSERT(name)                                \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    TCGv_ptr xt, xb;                                            \
    TCGv_i32 t0;                                                \
    TCGv_i64 t1;                                                \
    uint8_t uimm = UIMM4(ctx->opcode);                          \
                                                                \
    if (unlikely(!ctx->vsx_enabled)) {                          \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
        return;                                                 \
    }                                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                          \
    xb = gen_vsr_ptr(xB(ctx->opcode));                          \
    t0 = tcg_temp_new_i32();                                    \
    t1 = tcg_temp_new_i64();                                    \
    /*                                                          \
     * uimm > 15 is out of bounds; uimm > 12 is handled as per  \
     * hardware in the helper.                                  \
     */                                                         \
    if (uimm > 15) {                                            \
        tcg_gen_movi_i64(t1, 0);                                \
        set_cpu_vsr(xT(ctx->opcode), t1, true);                 \
        set_cpu_vsr(xT(ctx->opcode), t1, false);                \
        return;                                                 \
    }                                                           \
    tcg_gen_movi_i32(t0, uimm);                                 \
    gen_helper_##name(cpu_env, xt, xb, t0);                     \
    tcg_temp_free_ptr(xb);                                      \
    tcg_temp_free_ptr(xt);                                      \
    tcg_temp_free_i32(t0);                                      \
    tcg_temp_free_i64(t1);                                      \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
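
/* xsxexpdp: extract the biased DP exponent of VSR[XB] into GPR[RT]. */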
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xB(ctx->opcode), true);
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}
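
/* xsxexpqp: extract the biased QP exponent of VSR[VRB + 32] into VSR[VRT + 32]. */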
static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
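
/* xsiexpdp: insert the exponent from RB into the sign and fraction from RA. */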
static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}
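
/* xsiexpqp: insert the exponent from VRB into the sign and fraction from VRA. */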
static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsr(xah, rA(ctx->opcode) + 32, true);
    get_cpu_vsr(xal, rA(ctx->opcode) + 32, false);
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}
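
/*
 * xsxsigdp: extract the DP significand into GPR[RT]; the implicit
 * leading bit is set unless the exponent denotes a zero/denormal
 * (exp == 0) or an infinity/NaN (exp == 2047).
 */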
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}
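
/*
 * xsxsigqp: extract the QP significand; as for xsxsigdp, the implicit
 * bit is suppressed when the exponent is all-zeros or all-ones.
 */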
static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif
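
/* xviexpsp: per SP element, insert the exponent from XB into XA's sign and fraction. */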
static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
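
/* xviexpdp: per DP element, insert the exponent from XB into XA's sign and fraction. */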
static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
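
/* xvxexpsp: extract the biased exponent of each SP element of XB. */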
static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
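
/* xvxexpdp: extract the biased exponent of each DP element of XB. */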
static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)
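
/*
 * xvxsigdp: extract the significand of each DP element, with the
 * implicit bit suppressed for zeros/denormals and infinities/NaNs.
 */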
static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
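
/*
 * Common translation for the lxv/stxv family: each 16-byte VSR is
 * transferred as two doublewords, and for the paired (lxvp/stxvp)
 * forms the register order is additionally swapped in little-endian
 * mode.
 */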
static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ,
                     int rt, bool store, bool paired)
{
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;
    int rt1, rt2;

    xt = tcg_temp_new_i64();

    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (paired && ctx->le_mode) {
        rt1 = rt + 1;
        rt2 = rt;
    } else {
        rt1 = rt;
        rt2 = rt + 1;
    }

    if (store) {
        get_cpu_vsr(xt, rt1, !ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        gen_addr_add(ctx, ea, ea, 8);
        get_cpu_vsr(xt, rt1, ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, !ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        }
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, !ctx->le_mode);
        gen_addr_add(ctx, ea, ea, 8);
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, ctx->le_mode);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, !ctx->le_mode);
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, ctx->le_mode);
        }
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}

static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired)
{
    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired);
}

static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a,
                           bool store, bool paired)
{
    arg_D d;
    REQUIRE_VSX(ctx);

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired);
}

static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired)
{
    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired);
}

TRANS_FLAGS2(ISA300, STXV, do_lstxv_D, true, false)
TRANS_FLAGS2(ISA300, LXV, do_lstxv_D, false, false)
TRANS_FLAGS2(ISA310, STXVP, do_lstxv_D, true, true)
TRANS_FLAGS2(ISA310, LXVP, do_lstxv_D, false, true)
TRANS_FLAGS2(ISA300, STXVX, do_lstxv_X, true, false)
TRANS_FLAGS2(ISA300, LXVX, do_lstxv_X, false, false)
TRANS_FLAGS2(ISA310, STXVPX, do_lstxv_X, true, true)
TRANS_FLAGS2(ISA310, LXVPX, do_lstxv_X, false, true)
TRANS64_FLAGS2(ISA310, PSTXV, do_lstxv_PLS_D, true, false)
TRANS64_FLAGS2(ISA310, PLXV, do_lstxv_PLS_D, false, false)
TRANS64_FLAGS2(ISA310, PSTXVP, do_lstxv_PLS_D, true, true)
TRANS64_FLAGS2(ISA310, PLXVP, do_lstxv_PLS_D, false, true)
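
/*
 * XXBLENDV[BHWD]: per element, XT gets the XB element when the most
 * significant bit of the XC element is set, else the XA element. The
 * inline expansion replicates the sign bit with an arithmetic shift
 * and then does a bitwise select.
 */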
static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                             TCGv_vec c)
{
    TCGv_vec tmp = tcg_temp_new_vec_matching(c);
    tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1);
    tcg_gen_bitsel_vec(vece, t, tmp, b, a);
    tcg_temp_free_vec(tmp);
}

static bool do_xxblendv(DisasContext *ctx, arg_XX4 *a, unsigned vece)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_sari_vec, 0
    };
    static const GVecGen4 ops[4] = {
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVB,
            .opt_opc = vecop_list,
            .vece = MO_8
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVH,
            .opt_opc = vecop_list,
            .vece = MO_16
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVW,
            .opt_opc = vecop_list,
            .vece = MO_32
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVD,
            .opt_opc = vecop_list,
            .vece = MO_64
        }
    };

    REQUIRE_VSX(ctx);

    tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
                   vsr_full_offset(a->xb), vsr_full_offset(a->xc),
                   16, 16, &ops[vece]);

    return true;
}

TRANS(XXBLENDVB, do_xxblendv, MO_8)
TRANS(XXBLENDVH, do_xxblendv, MO_16)
TRANS(XXBLENDVW, do_xxblendv, MO_32)
TRANS(XXBLENDVD, do_xxblendv, MO_64)
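
/*
 * Common translation for the ISA 3.0 scalar DP maximum/minimum
 * instructions (type-C and type-J); they differ only in the helper
 * invoked.
 */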
static bool do_xsmaxmincjdp(DisasContext *ctx, arg_XX3 *a,
                            void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    helper(cpu_env, xt, xa, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSMAXCDP, do_xsmaxmincjdp, gen_helper_xsmaxcdp)
TRANS(XSMINCDP, do_xsmaxmincjdp, gen_helper_xsmincdp)
TRANS(XSMAXJDP, do_xsmaxmincjdp, gen_helper_xsmaxjdp)
TRANS(XSMINJDP, do_xsmaxmincjdp, gen_helper_xsminjdp)

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL