target/ppc: move xxperm/xxpermr to decodetree
target/ppc/translate/vsx-impl.c.inc

/***                           VSX extension                               ***/
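
/*
 * Accessors for the 64-bit halves of a VSR in CPUPPCState: load or
 * store one doubleword of register n ('high' selects the
 * most-significant half), or build a TCGv_ptr to the full register
 * for passing to out-of-line helpers.
 */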
static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high));
}

static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}
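
/*
 * Scalar load template: after the VSX availability check, compute the
 * effective address and load through the given gen_qemu_* operation
 * into the high doubleword of xT; the low doubleword is deliberately
 * left untouched.
 */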
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsr(xT(ctx->opcode), t0, true);                   \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)
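
/*
 * lxvd2x loads the two doublewords of xT in element order; each
 * 64-bit load itself honours the current byte order, so no further
 * permutation is required here.
 */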
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
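    /*
     * Word-element load: in little-endian mode the two words within
     * each doubleword are exchanged after an LE 64-bit load, using a
     * 32-bit shift plus deposit.
     */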
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
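
/*
 * lxvwsx loads one word and splats it to all four word elements of
 * xT. xT >= 32 names an Altivec register, so VPU rather than VSX
 * availability is checked in that case.
 */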
static void gen_lxvwsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i32 data;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i32(data);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 data;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UQ));
    tcg_gen_gvec_dup_i64(MO_UQ, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i64(data);
}
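
/*
 * Byte-swap each of the eight halfwords in a 128-bit value passed as
 * two i64 halves: isolate alternating bytes with a mask, shift both
 * selections by 8 in opposite directions, and OR them back together.
 */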
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}
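
/*
 * Byte-swap each of the four words in a 128-bit value: bswap64
 * reverses all eight bytes of a half, which also exchanges its two
 * words, so a shift plus deposit puts the words back in place.
 */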
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#ifdef TARGET_PPC64
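/*
 * Load/store with length: lxvl/lxvll/stxvl/stxvll take the transfer
 * length from rB and are implemented out of line; the helper receives
 * the effective address, a pointer to xT and the length register.
 */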
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);                 \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)
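
/*
 * Scalar store template, the mirror of VSX_LOAD_SCALAR: the high
 * doubleword of xS is fetched and stored through the given
 * gen_qemu_* operation.
 */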
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsr(t0, xS(ctx->opcode), true);                   \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEUQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xth, rD(ctx->opcode) + 32, true);                 \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)
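
/*
 * Moves between GPRs and VSRs. VSRs 0-31 overlap the FP registers and
 * VSRs 32-63 the Altivec registers, so which availability check
 * applies (FPU or VPU) depends on the half of the register file that
 * is addressed.
 */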
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsr(xT(ctx->opcode), t0, true);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

#endif
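
/*
 * xxpermdi: the DM field selects which doubleword of xA supplies the
 * high half of xT and which doubleword of xB supplies the low half.
 * When xT overlaps a source register, both selected doublewords are
 * read into temporaries before either half of xT is written.
 */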
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        get_cpu_vsr(xh, xA(ctx->opcode), (DM(ctx->opcode) & 2) == 0);
        get_cpu_vsr(xl, xB(ctx->opcode), (DM(ctx->opcode) & 1) == 0);

        set_cpu_vsr(xT(ctx->opcode), xh, true);
        set_cpu_vsr(xT(ctx->opcode), xl, false);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsr(xh, xA(ctx->opcode), true);
            set_cpu_vsr(xT(ctx->opcode), xh, true);
        } else {
            get_cpu_vsr(xh, xA(ctx->opcode), false);
            set_cpu_vsr(xT(ctx->opcode), xh, true);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsr(xl, xB(ctx->opcode), true);
            set_cpu_vsr(xT(ctx->opcode), xl, false);
        } else {
            get_cpu_vsr(xl, xB(ctx->opcode), false);
            set_cpu_vsr(xT(ctx->opcode), xl, false);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull
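
/*
 * Scalar sign-bit operations on the high doubleword of xB: ABS clears
 * the sign bit (andc), NABS sets it (or), NEG flips it (xor) and
 * CPSGN copies it from xA. The low doubleword of xT is zeroed.
 */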
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xb, xB(ctx->opcode), true);                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                get_cpu_vsr(xa, xA(ctx->opcode), true);           \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xb, true);                   \
        set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xbh, xb, true);                                   \
    get_cpu_vsr(xbl, xb, false);                                  \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsr(tmp, xa, true);                               \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsr(xt, xbh, true);                                   \
    set_cpu_vsr(xt, xbl, false);                                  \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext *ctx)                  \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        get_cpu_vsr(xbh, xB(ctx->opcode), true);                 \
        get_cpu_vsr(xbl, xB(ctx->opcode), false);                \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                get_cpu_vsr(xah, xA(ctx->opcode), true);         \
                get_cpu_vsr(xal, xA(ctx->opcode), false);        \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        set_cpu_vsr(xT(ctx->opcode), xbh, true);                 \
        set_cpu_vsr(xT(ctx->opcode), xbl, false);                \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
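
/*
 * Vector compares: with the record bit (bit 21) set, the helper's
 * CR6-style result is written to cpu_crf[6]; otherwise it is computed
 * into a dead temporary. FP status is checked either way.
 */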
#define VSX_CMP(name, op1, op2, inval, type)                                  \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 ignored;                                                         \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);                   \
    } else {                                                                  \
        ignored = tcg_temp_new_i32();                                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);                      \
        tcg_temp_free_i32(ignored);                                           \
    }                                                                         \
    gen_helper_float_check_status(cpu_env);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
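
/*
 * Decodetree handlers: trans_* functions receive pre-decoded argument
 * structs (arg_X_tb_rc, arg_XX3) instead of extracting fields from
 * ctx->opcode, and use the REQUIRE_* macros for availability checks.
 */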
static bool trans_XSCVQPDP(DisasContext *ctx, arg_X_tb_rc *a)
{
    TCGv_i32 ro;
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    ro = tcg_const_i32(a->rc);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper_XSCVQPDP(cpu_env, ro, xt, xb);
    tcg_temp_free_i32(ro);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}
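
/*
 * Templates for instructions implemented entirely by a helper call.
 * The X-form variants pass pointers to the VSRs named by xT/xA/xB;
 * the R variants address registers 32-63, i.e. the Altivec half of
 * the VSR file.
 */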
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xb);                                       \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xb;                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xb);                                      \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                              \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsr(t0, xB(ctx->opcode), true);                   \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsr(xT(ctx->opcode), t1, true);                   \
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}
1082 GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
1083 GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
1084 GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
1085 GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
1086 GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
1087 GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
1088 GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
1089 GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
1090 GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
1091 GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
1092 GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
1093 GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
1094 GEN_VSX_HELPER_X3(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
1095 GEN_VSX_HELPER_X3(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
1096 GEN_VSX_HELPER_X3(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
1097 GEN_VSX_HELPER_X3(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
1098 GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
1099 GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
1100 GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
1101 GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
1102 GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
1103 GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
1104 GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
1105 GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
1106 GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
1107 GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
1108 GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
1109 GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
1110 GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
1111 GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
1112 GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
1113 GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
1114 GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
1115 GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
1116 GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
1117 GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
1118 GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
1119 GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
1120 GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
1121 GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
1122 GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
1123 GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
1124 GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
1125 GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
1126 GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
1127 GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
1128 GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
1129 GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
1130 GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
1131 GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
1132 GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
1133 GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
1134 GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
1135 GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
1136 GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
1137 GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
1138 GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
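
/*
 * The trans_* handlers below use the decodetree interface: the operand
 * fields arrive pre-extracted in the arg_* structure instead of being
 * pulled out of ctx->opcode by hand.
 */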

static bool trans_XXPERM(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERM(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XXPERMR(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERMR(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}
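
/*
 * XXPERM and XXPERMR reuse the Altivec VPERM/VPERMR helpers: XT is
 * passed both as the destination and as the second source vector, so
 * the result is a byte permute of XA:XT selected by XB.
 */

/*
 * The multiply-add instructions come in two forms per opcode pair: with
 * bit 25 of the opcode set, the target register also supplies the
 * multiplicand (A*T + B); otherwise it supplies the addend (A*B + T).
 * The macro below picks the b/c operand pointers accordingly.
 */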

#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)             \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, b, c;                                                    \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    if (ctx->opcode & PPC_BIT32(25)) {                                        \
        /*                                                                    \
         * AxT + B                                                            \
         */                                                                   \
        b = gen_vsr_ptr(xT(ctx->opcode));                                     \
        c = gen_vsr_ptr(xB(ctx->opcode));                                     \
    } else {                                                                  \
        /*                                                                    \
         * AxB + T                                                            \
         */                                                                   \
        b = gen_vsr_ptr(xB(ctx->opcode));                                     \
        c = gen_vsr_ptr(xT(ctx->opcode));                                     \
    }                                                                         \
    gen_helper_##name(cpu_env, xt, xa, b, c);                                 \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_ptr(c);                                                     \
}

GEN_VSX_HELPER_VSX_MADD(xsmadddp, 0x04, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmsubdp, 0x04, 0x06, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmadddp, 0x04, 0x14, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmsubdp, 0x04, 0x16, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmaddsp, 0x04, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsmsubsp, 0x04, 0x02, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmaddsp, 0x04, 0x10, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmsubsp, 0x04, 0x12, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)

static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
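
/*
 * XXBRQ reverses all sixteen bytes of the quadword: each doubleword is
 * byte-swapped and the two doublewords trade places, with t0 staging
 * the new high half.
 */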

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
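
/*
 * XXBRW byte-reverses each of the four words; gen_bswap32x4 handles
 * both 64-bit halves of the VSR in one go.
 */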

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
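
/*
 * The VSX logical operations expand to gvec operations over the full
 * 16-byte register; the MO_64 element size is immaterial for purely
 * bitwise operations.
 */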

#define VSX_LOGICAL(name, vece, tcg_op)                              \
static void glue(gen_, name)(DisasContext *ctx)                      \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),               \
               vsr_full_offset(xA(ctx->opcode)),                     \
               vsr_full_offset(xB(ctx->opcode)), 16, 16);            \
    }

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
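
/*
 * The merge-word forms interleave the words of the high (xxmrghw) or
 * low (xxmrglw) doublewords of xA and xB: xxmrghw, for example,
 * produces { A.w0, B.w0, A.w1, B.w1 }.
 */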

#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext *ctx)             \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1, tmp;                       \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        tmp = tcg_temp_new_i64();                           \
        get_cpu_vsr(a0, xA(ctx->opcode), high);             \
        get_cpu_vsr(a1, xA(ctx->opcode), high);             \
        get_cpu_vsr(b0, xB(ctx->opcode), high);             \
        get_cpu_vsr(b1, xB(ctx->opcode), high);             \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);           \
        set_cpu_vsr(xT(ctx->opcode), tmp, true);            \
        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);           \
        set_cpu_vsr(xT(ctx->opcode), tmp, false);           \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
        tcg_temp_free_i64(tmp);                             \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)
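
/*
 * XXSEL is a pure bit select: each bit of xc chooses between xb (bit
 * set) and xa (bit clear), which maps directly onto gvec bitsel.
 */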

static bool trans_XXSEL(DisasContext *ctx, arg_XX4 *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(a->xt), vsr_full_offset(a->xc),
                        vsr_full_offset(a->xb), vsr_full_offset(a->xa), 16, 16);

    return true;
}
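
/*
 * XXSPLTW replicates one word of xB across xT.  The byte offset is
 * computed for a big-endian host layout; on little-endian hosts the
 * selected word sits at the mirrored position within the register,
 * hence the offset fixup below.
 */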

static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2 *a)
{
    int tofs, bofs;

    REQUIRE_VSX(ctx);

    tofs = vsr_full_offset(a->xt);
    bofs = vsr_full_offset(a->xb);
    bofs += a->uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
    return true;
}
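
/*
 * XXSPLTIB splats an 8-bit immediate; with XT >= 32 it targets a Vector
 * register and therefore only requires VEC rather than VSX.
 */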

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a)
{
    if (a->xt < 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }
    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm);
    return true;
}

static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si);

    return true;
}

static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16,
                         helper_todouble(a->si));
    return true;
}

static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a)
{
    TCGv_i32 imm;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    imm = tcg_constant_i32(a->si);

    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix)));

    return true;
}
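
/*
 * LXVKQ loads one of a fixed set of float128 constants.  Only the high
 * doubleword varies and the low doubleword of every defined constant is
 * zero, so the table stores just the high part; 0 marks the unspecified
 * encodings, which are treated as invalid.
 */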

static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a)
{
    static const uint64_t values[32] = {
        0, /* Unspecified */
        0x3FFF000000000000llu, /* QP +1.0 */
        0x4000000000000000llu, /* QP +2.0 */
        0x4000800000000000llu, /* QP +3.0 */
        0x4001000000000000llu, /* QP +4.0 */
        0x4001400000000000llu, /* QP +5.0 */
        0x4001800000000000llu, /* QP +6.0 */
        0x4001C00000000000llu, /* QP +7.0 */
        0x7FFF000000000000llu, /* QP +Inf */
        0x7FFF800000000000llu, /* QP dQNaN */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0x8000000000000000llu, /* QP -0.0 */
        0xBFFF000000000000llu, /* QP -1.0 */
        0xC000000000000000llu, /* QP -2.0 */
        0xC000800000000000llu, /* QP -3.0 */
        0xC001000000000000llu, /* QP -4.0 */
        0xC001400000000000llu, /* QP -5.0 */
        0xC001800000000000llu, /* QP -6.0 */
        0xC001C00000000000llu, /* QP -7.0 */
        0xFFFF000000000000llu, /* QP -Inf */
    };

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (values[a->uim]) {
        set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
        set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true);
    } else {
        gen_invalid(ctx);
    }

    return true;
}
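
/*
 * XXSLDWI shifts the concatenation VSR[A]:VSR[B] left by SHW words and
 * keeps the leftmost 16 bytes, so each case below stitches the result
 * doublewords together from the neighbouring 32-bit halves.
 */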

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            get_cpu_vsr(xth, xA(ctx->opcode), true);
            get_cpu_vsr(xtl, xA(ctx->opcode), false);
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsr(xth, xA(ctx->opcode), true);
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsr(t0, xA(ctx->opcode), false);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsr(xtl, xA(ctx->opcode), false);
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), true);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            get_cpu_vsr(xth, xA(ctx->opcode), false);
            get_cpu_vsr(xtl, xB(ctx->opcode), true);
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsr(xth, xA(ctx->opcode), false);
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), true);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsr(xtl, xB(ctx->opcode), true);
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), false);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
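
/*
 * xxextractuw and xxinsertw share the shape below: the 4-bit immediate
 * byte index is handed to a helper, which also deals with the partially
 * out-of-range indexes above 12 the way hardware does.
 */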

#define VSX_EXTRACT_INSERT(name)                                \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    TCGv_ptr xt, xb;                                            \
    TCGv_i32 t0;                                                \
    TCGv_i64 t1;                                                \
    uint8_t uimm = UIMM4(ctx->opcode);                          \
                                                                \
    if (unlikely(!ctx->vsx_enabled)) {                          \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
        return;                                                 \
    }                                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                          \
    xb = gen_vsr_ptr(xB(ctx->opcode));                          \
    t0 = tcg_temp_new_i32();                                    \
    t1 = tcg_temp_new_i64();                                    \
    /*                                                          \
     * uimm > 15 is out of bounds; uimm > 12 is handled         \
     * as per hardware in the helper.                           \
     */                                                         \
    if (uimm > 15) {                                            \
        tcg_gen_movi_i64(t1, 0);                                \
        set_cpu_vsr(xT(ctx->opcode), t1, true);                 \
        set_cpu_vsr(xT(ctx->opcode), t1, false);                \
        /* Free the temporaries on the early return path too */ \
        tcg_temp_free_ptr(xb);                                  \
        tcg_temp_free_ptr(xt);                                  \
        tcg_temp_free_i32(t0);                                  \
        tcg_temp_free_i64(t1);                                  \
        return;                                                 \
    }                                                           \
    tcg_gen_movi_i32(t0, uimm);                                 \
    gen_helper_##name(cpu_env, xt, xb, t0);                     \
    tcg_temp_free_ptr(xb);                                      \
    tcg_temp_free_ptr(xt);                                      \
    tcg_temp_free_i32(t0);                                      \
    tcg_temp_free_i64(t1);                                      \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xB(ctx->opcode), true);
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}
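
/*
 * xsxexpqp is the quad-precision counterpart: the 15-bit biased
 * exponent sits at bit 48 of the most significant doubleword, and
 * quad-precision operands live in VSRs 32-63, hence the "+ 32"
 * register numbers.
 */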

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}
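
/*
 * As above, but for quad precision: keep the sign and fraction bits
 * from rA, insert the 15-bit exponent from rB at bit 48, and copy the
 * low doubleword through unchanged.
 */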

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsr(xah, rA(ctx->opcode) + 32, true);
    get_cpu_vsr(xal, rA(ctx->opcode) + 32, false);
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}
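
/*
 * Significand extraction: the implicit integer bit is supplied unless
 * the biased exponent is zero (zero/denormal) or all-ones (inf/NaN);
 * the two movcond calls zero it out for those cases.  gen_xsxsigqp
 * below applies the same pattern to quad precision.
 */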

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif

static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
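
/*
 * For the single-precision form above, the 0xFF000000FF mask picks up
 * one 8-bit exponent per word of the doubleword pair before shifting it
 * into place at bit 23.  The double-precision form below is simpler: a
 * plain deposit of the 11-bit exponent field of each doubleword.
 */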

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
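
/*
 * do_lstxv below accesses each 16-byte VSR as two 8-byte memory
 * operations.  The !ctx->le_mode selector decides which register half
 * is transferred first so that the memory image matches the guest byte
 * order, and paired accesses additionally swap the two registers in
 * little-endian mode.
 */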

static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ,
                     int rt, bool store, bool paired)
{
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;
    int rt1, rt2;

    xt = tcg_temp_new_i64();

    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (paired && ctx->le_mode) {
        rt1 = rt + 1;
        rt2 = rt;
    } else {
        rt1 = rt;
        rt2 = rt + 1;
    }

    if (store) {
        get_cpu_vsr(xt, rt1, !ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        gen_addr_add(ctx, ea, ea, 8);
        get_cpu_vsr(xt, rt1, ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, !ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        }
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, !ctx->le_mode);
        gen_addr_add(ctx, ea, ea, 8);
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, ctx->le_mode);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, !ctx->le_mode);
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, ctx->le_mode);
        }
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}

static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired)
{
    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired);
}

static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a,
                           bool store, bool paired)
{
    arg_D d;
    REQUIRE_VSX(ctx);

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired);
}

static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired)
{
    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired);
}

TRANS_FLAGS2(ISA300, STXV, do_lstxv_D, true, false)
TRANS_FLAGS2(ISA300, LXV, do_lstxv_D, false, false)
TRANS_FLAGS2(ISA310, STXVP, do_lstxv_D, true, true)
TRANS_FLAGS2(ISA310, LXVP, do_lstxv_D, false, true)
TRANS_FLAGS2(ISA300, STXVX, do_lstxv_X, true, false)
TRANS_FLAGS2(ISA300, LXVX, do_lstxv_X, false, false)
TRANS_FLAGS2(ISA310, STXVPX, do_lstxv_X, true, true)
TRANS_FLAGS2(ISA310, LXVPX, do_lstxv_X, false, true)
TRANS64_FLAGS2(ISA310, PSTXV, do_lstxv_PLS_D, true, false)
TRANS64_FLAGS2(ISA310, PLXV, do_lstxv_PLS_D, false, false)
TRANS64_FLAGS2(ISA310, PSTXVP, do_lstxv_PLS_D, true, true)
TRANS64_FLAGS2(ISA310, PLXVP, do_lstxv_PLS_D, false, true)

static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                             TCGv_vec c)
{
    TCGv_vec tmp = tcg_temp_new_vec_matching(c);
    tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1);
    tcg_gen_bitsel_vec(vece, t, tmp, b, a);
    tcg_temp_free_vec(tmp);
}
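
/*
 * The XXBLENDV* selector is the most significant bit of each element of
 * xc; the arithmetic shift above replicates it across the whole
 * element, turning it into an all-ones/all-zeroes mask for bitsel.
 */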

static bool do_xxblendv(DisasContext *ctx, arg_8RR_XX4 *a, unsigned vece)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_sari_vec, 0
    };
    static const GVecGen4 ops[4] = {
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVB,
            .opt_opc = vecop_list,
            .vece = MO_8
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVH,
            .opt_opc = vecop_list,
            .vece = MO_16
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVW,
            .opt_opc = vecop_list,
            .vece = MO_32
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVD,
            .opt_opc = vecop_list,
            .vece = MO_64
        }
    };

    REQUIRE_VSX(ctx);

    tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
                   vsr_full_offset(a->xb), vsr_full_offset(a->xc),
                   16, 16, &ops[vece]);

    return true;
}

TRANS(XXBLENDVB, do_xxblendv, MO_8)
TRANS(XXBLENDVH, do_xxblendv, MO_16)
TRANS(XXBLENDVW, do_xxblendv, MO_32)
TRANS(XXBLENDVD, do_xxblendv, MO_64)

static bool do_xsmaxmincjdp(DisasContext *ctx, arg_XX3 *a,
                            void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    helper(cpu_env, xt, xa, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSMAXCDP, do_xsmaxmincjdp, gen_helper_xsmaxcdp)
TRANS(XSMINCDP, do_xsmaxmincjdp, gen_helper_xsmincdp)
TRANS(XSMAXJDP, do_xsmaxmincjdp, gen_helper_xsmaxjdp)
TRANS(XSMINJDP, do_xsmaxmincjdp, gen_helper_xsminjdp)

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL