/* target/ppc/translate/vsx-impl.c.inc */
/***                           VSX extension                               ***/

static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high));
}

static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}
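
/*
 * A VSR is 128 bits wide and is accessed here as two 64-bit halves:
 * get_cpu_vsr()/set_cpu_vsr() load or store one half of VSR[n] at its
 * offset inside CPUPPCState, while gen_vsr_ptr() materialises a host
 * pointer to the full register for helpers that need all 128 bits.
 * A minimal usage sketch (following the idiom used throughout this
 * file) that copies the high doubleword of VSR[xB] into VSR[xT]:
 *
 *     TCGv_i64 t = tcg_temp_new_i64();
 *     get_cpu_vsr(t, xB(ctx->opcode), true);
 *     set_cpu_vsr(xT(ctx->opcode), t, true);
 *     tcg_temp_free_i64(t);
 */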
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsr(xT(ctx->opcode), t0, true);                   \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)
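
/*
 * Each instantiation above expands to a gen_* function that computes the
 * effective address, performs a single memory access of the named width,
 * and writes the result to the high doubleword of VSR[xT]; e.g. lxsiwax
 * (ld32s_i64) sign-extends the loaded word while lxsiwzx (ld32u_i64)
 * zero-extends it.
 */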
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
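
/*
 * In little-endian mode the two words within each doubleword must be
 * exchanged after the 64-bit LE load.  tcg_gen_deposit_i64(xth, t1, t0,
 * 32, 32) writes the low 32 bits of t0 into bits 63:32 of t1; with
 * t1 = t0 >> 32 the result is the original doubleword with its two
 * 32-bit words swapped.
 */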
static void gen_lxvwsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i32 data;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i32(data);
}
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 data;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UQ));
    tcg_gen_gvec_dup_i64(MO_UQ, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i64(data);
}
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}
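
/*
 * The mask trick above swaps the two bytes of every 16-bit lane of a
 * doubleword at once.  Worked example on a single lane: for in = 0x1234,
 * (in & 0x00FF) << 8 = 0x3400 and (in >> 8) & 0x00FF = 0x0012, so the
 * final OR yields 0x3412.
 */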
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}
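
/*
 * gen_bswap32x4() byte-reverses each 32-bit word.  tcg_gen_bswap64_i64()
 * reverses all eight bytes, which also exchanges the two words, so the
 * following shift/deposit pair swaps the words back into their original
 * positions, leaving only the per-word byte reversal.
 */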
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif
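
/*
 * lxvl/lxvll and stxvl/stxvll transfer a variable number of bytes taken
 * from rB rather than a fixed 16, so they are implemented with
 * out-of-line helpers that receive the EA, a pointer to the target VSR
 * and the length register.  Note the address comes from
 * gen_addr_register() (rA or zero), with no index register added.
 */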
#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);                 \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsr(t0, xS(ctx->opcode), true);                   \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)
static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEUQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xth, rD(ctx->opcode) + 32, true);                 \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}
static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}
static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}
#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}
static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}
static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}
static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsr(xT(ctx->opcode), t0, true);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free_i64(t0);
}
static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}
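
/*
 * mtvsrws splats the low word of rA across all four word elements of the
 * target: the deposit above yields gpr[rA] with its low 32 bits copied
 * into bits 63:32, and that doubleword is then written to both halves of
 * VSR[xT].
 */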
#endif
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        get_cpu_vsr(xh, xA(ctx->opcode), (DM(ctx->opcode) & 2) == 0);
        get_cpu_vsr(xl, xB(ctx->opcode), (DM(ctx->opcode) & 1) == 0);

        set_cpu_vsr(xT(ctx->opcode), xh, true);
        set_cpu_vsr(xT(ctx->opcode), xl, false);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsr(xh, xA(ctx->opcode), true);
            set_cpu_vsr(xT(ctx->opcode), xh, true);
        } else {
            get_cpu_vsr(xh, xA(ctx->opcode), false);
            set_cpu_vsr(xT(ctx->opcode), xh, true);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsr(xl, xB(ctx->opcode), true);
            set_cpu_vsr(xT(ctx->opcode), xl, false);
        } else {
            get_cpu_vsr(xl, xB(ctx->opcode), false);
            set_cpu_vsr(xT(ctx->opcode), xl, false);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}
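
/*
 * xxpermdi selects one doubleword from each source according to the
 * 2-bit DM field: the high DM bit (mask 2) picks the high (0) or low (1)
 * doubleword of xA for the target's high half, and the low DM bit
 * (mask 1) does the same with xB for the low half.  The path at the top
 * handles xT aliasing xA or xB by reading both inputs before any write.
 */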
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull
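
/*
 * SGN_MASK_DP covers the sign bit of the one double-precision value in a
 * doubleword; SGN_MASK_SP sets bits 63 and 31 so that it covers the sign
 * bits of both single-precision values packed in a doubleword.
 */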
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xb, xB(ctx->opcode), true);                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                get_cpu_vsr(xa, xA(ctx->opcode), true);           \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xb, true);                   \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
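
/*
 * All four scalar move operations reduce to one bitwise operation on the
 * sign bit: abs clears it (andc), nabs sets it (or), neg flips it (xor),
 * and cpsgn replaces it with the sign bit of xA (and/andc/or sequence).
 * The switch on a constant argument folds away at compile time.
 */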
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xbh, xb, true);                                   \
    get_cpu_vsr(xbl, xb, false);                                  \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsr(tmp, xa, true);                               \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsr(xt, xbh, true);                                   \
    set_cpu_vsr(xt, xbl, false);                                  \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext *ctx)                  \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        get_cpu_vsr(xbh, xB(ctx->opcode), true);                 \
        get_cpu_vsr(xbl, xB(ctx->opcode), false);                \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                get_cpu_vsr(xah, xA(ctx->opcode), true);         \
                get_cpu_vsr(xal, xA(ctx->opcode), false);        \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        set_cpu_vsr(xT(ctx->opcode), xbh, true);                 \
        set_cpu_vsr(xT(ctx->opcode), xbl, false);                \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
#define VSX_CMP(name, op1, op2, inval, type)                                  \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 ignored;                                                         \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);                   \
    } else {                                                                  \
        ignored = tcg_temp_new_i32();                                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);                      \
        tcg_temp_free_i32(ignored);                                           \
    }                                                                         \
    gen_helper_float_check_status(cpu_env);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
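
/*
 * The test (ctx->opcode >> (31 - 21)) & 1 extracts instruction bit 21
 * (the Rc bit of the xvcmp* encodings).  When set, the helper's
 * comparison summary is written to CR6; otherwise the CR result is
 * computed into a scratch register and discarded.
 */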
static bool trans_XSCVQPDP(DisasContext *ctx, arg_X_tb_rc *a)
{
    TCGv_i32 ro;
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    ro = tcg_const_i32(a->rc);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper_XSCVQPDP(cpu_env, ro, xt, xb);
    tcg_temp_free_i32(ro);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}
#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}
#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xb);                                       \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}
#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}
#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xb;                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xb);                                      \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xb);                                                    \
}
#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                              \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}
#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}
#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}
#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsr(t0, xB(ctx->opcode), true);                   \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsr(xT(ctx->opcode), t1, true);                   \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}
1080 GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
1081 GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
1082 GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
1083 GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
1084 GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
1085 GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
1086 GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
1087 GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
1088 GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
1089 GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
1090 GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
1091 GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
1092 GEN_VSX_HELPER_X3(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
1093 GEN_VSX_HELPER_X3(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
1094 GEN_VSX_HELPER_X3(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
1095 GEN_VSX_HELPER_X3(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
1096 GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
1097 GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
1098 GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
1099 GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
1100 GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
1101 GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
1102 GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
1103 GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
1104 GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
1105 GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
1106 GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
1107 GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
1108 GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
1109 GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
1110 GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
1111 GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
1112 GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
1113 GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
1114 GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
1115 GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
1116 GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
1117 GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
1118 GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
1119 GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
1120 GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
1121 GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
1122 GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
1123 GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
1124 GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
1125 GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
1126 GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
1127 GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
1128 GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
1129 GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
1130 GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
1131 GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
1132 GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
1133 GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
1134 GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
1135 GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
1136 GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
1137 GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
1138 GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)
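
/*
 * VSX fused multiply-add.  Each instruction's two register layouts
 * share one out-of-line helper: when opcode bit 25 is set the
 * translator passes the operands so the helper computes A*T + B
 * (the "m" variants), otherwise A*B + T (the "a" variants), so only
 * the multiplicand/addend pointers differ.
 */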
#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)             \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, b, c;                                                    \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    if (ctx->opcode & PPC_BIT32(25)) {                                        \
        /*                                                                    \
         * AxT + B                                                            \
         */                                                                   \
        b = gen_vsr_ptr(xT(ctx->opcode));                                     \
        c = gen_vsr_ptr(xB(ctx->opcode));                                     \
    } else {                                                                  \
        /*                                                                    \
         * AxB + T                                                            \
         */                                                                   \
        b = gen_vsr_ptr(xB(ctx->opcode));                                     \
        c = gen_vsr_ptr(xT(ctx->opcode));                                     \
    }                                                                         \
    gen_helper_##name(cpu_env, xt, xa, b, c);                                 \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_ptr(c);                                                     \
}
GEN_VSX_HELPER_VSX_MADD(xsmadddp, 0x04, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmsubdp, 0x04, 0x06, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmadddp, 0x04, 0x14, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsnmsubdp, 0x04, 0x16, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xsmaddsp, 0x04, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsmsubsp, 0x04, 0x02, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmaddsp, 0x04, 0x10, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xsnmsubsp, 0x04, 0x12, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)
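
/*
 * ISA 3.0 byte reversal.  xxbrh, xxbrw, xxbrd and xxbrq reverse the
 * bytes within halfwords, words, doublewords and the full quadword
 * of VSR[XB] respectively, writing the result to VSR[XT].
 */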
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
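
/*
 * The VSX logical instructions map directly onto the generic vector
 * (gvec) expanders over the full 16 bytes of the register, so no
 * out-of-line helpers are required.
 */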
#define VSX_LOGICAL(name, vece, tcg_op)                              \
static void glue(gen_, name)(DisasContext *ctx)                      \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),               \
               vsr_full_offset(xA(ctx->opcode)),                     \
               vsr_full_offset(xB(ctx->opcode)), 16, 16);            \
    }

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
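
/*
 * xxmrghw/xxmrglw interleave the words of one doubleword of VSR[XA]
 * and VSR[XB].  a0/b0 are shifted down to expose word 0 while a1/b1
 * keep word 1 in their low half; two deposits then assemble the
 * {A.word, B.word} pairs of the result.
 */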
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext *ctx)             \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1, tmp;                       \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        tmp = tcg_temp_new_i64();                           \
        get_cpu_vsr(a0, xA(ctx->opcode), high);             \
        get_cpu_vsr(a1, xA(ctx->opcode), high);             \
        get_cpu_vsr(b0, xB(ctx->opcode), high);             \
        get_cpu_vsr(b1, xB(ctx->opcode), high);             \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);           \
        set_cpu_vsr(xT(ctx->opcode), tmp, true);            \
        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);           \
        set_cpu_vsr(xT(ctx->opcode), tmp, false);           \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
        tcg_temp_free_i64(tmp);                             \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)
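
/*
 * xxsel: XT = (XA & ~XC) | (XB & XC).  tcg_gen_gvec_bitsel() computes
 * d = (b & a) | (c & ~a), so XC goes in as the selector operand, XB
 * as the "selected" input and XA as the fall-through input.
 */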
static void gen_xxsel(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int ra = xA(ctx->opcode);
    int rb = xB(ctx->opcode);
    int rc = xC(ctx->opcode);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(rt), vsr_full_offset(rc),
                        vsr_full_offset(rb), vsr_full_offset(ra), 16, 16);
}

static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2 *a)
{
    int tofs, bofs;

    REQUIRE_VSX(ctx);

    tofs = vsr_full_offset(a->xt);
    bofs = vsr_full_offset(a->xb);
    bofs += a->uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
    return true;
}
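
/*
 * XXSPLTIB can target the whole 64-register VSX file: xt < 32 needs
 * the VSX facility, while xt >= 32 names a VR and only needs the
 * vector facility.
 */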
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a)
{
    if (a->xt < 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }
    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm);
    return true;
}

static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si);

    return true;
}

static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16,
                         helper_todouble(a->si));
    return true;
}

static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a)
{
    TCGv_i32 imm;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    imm = tcg_constant_i32(a->si);

    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix)));

    return true;
}
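
/*
 * LXVKQ (ISA 3.1) loads one of a fixed set of quad-precision values
 * selected by UIM.  Encodings without a table entry below are not
 * defined and are treated as invalid.
 */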
static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a)
{
    static const uint64_t values[32] = {
        0, /* Unspecified */
        0x3FFF000000000000llu, /* QP +1.0 */
        0x4000000000000000llu, /* QP +2.0 */
        0x4000800000000000llu, /* QP +3.0 */
        0x4001000000000000llu, /* QP +4.0 */
        0x4001400000000000llu, /* QP +5.0 */
        0x4001800000000000llu, /* QP +6.0 */
        0x4001C00000000000llu, /* QP +7.0 */
        0x7FFF000000000000llu, /* QP +Inf */
        0x7FFF800000000000llu, /* QP dQNaN */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0x8000000000000000llu, /* QP -0.0 */
        0xBFFF000000000000llu, /* QP -1.0 */
        0xC000000000000000llu, /* QP -2.0 */
        0xC000800000000000llu, /* QP -3.0 */
        0xC001000000000000llu, /* QP -4.0 */
        0xC001400000000000llu, /* QP -5.0 */
        0xC001800000000000llu, /* QP -6.0 */
        0xC001C00000000000llu, /* QP -7.0 */
        0xFFFF000000000000llu, /* QP -Inf */
    };

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (values[a->uim]) {
        set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
        set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true);
    } else {
        gen_invalid(ctx);
    }

    return true;
}
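
/*
 * xxsldwi extracts 16 bytes from the 32-byte concatenation
 * VSR[XA]:VSR[XB], starting at word offset SHW (0..3); each case
 * assembles the two result doublewords from the matching 32-bit
 * halves.
 */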
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            get_cpu_vsr(xth, xA(ctx->opcode), true);
            get_cpu_vsr(xtl, xA(ctx->opcode), false);
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsr(xth, xA(ctx->opcode), true);
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsr(t0, xA(ctx->opcode), false);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsr(xtl, xA(ctx->opcode), false);
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), true);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            get_cpu_vsr(xth, xA(ctx->opcode), false);
            get_cpu_vsr(xtl, xB(ctx->opcode), true);
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsr(xth, xA(ctx->opcode), false);
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), true);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsr(xtl, xB(ctx->opcode), true);
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), false);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
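
/*
 * xxextractuw and xxinsertw share one translation shell and differ
 * only in the helper they call; the byte index comes from the UIMM4
 * opcode field.
 */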
#define VSX_EXTRACT_INSERT(name)                                \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    TCGv_ptr xt, xb;                                            \
    TCGv_i32 t0;                                                \
    TCGv_i64 t1;                                                \
    uint8_t uimm = UIMM4(ctx->opcode);                          \
                                                                \
    if (unlikely(!ctx->vsx_enabled)) {                          \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
        return;                                                 \
    }                                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                          \
    xb = gen_vsr_ptr(xB(ctx->opcode));                          \
    t0 = tcg_temp_new_i32();                                    \
    t1 = tcg_temp_new_i64();                                    \
    /*                                                          \
     * uimm > 15 is out of bounds; uimm > 12 is handled         \
     * as per hardware in the helper                            \
     */                                                         \
    if (uimm > 15) {                                            \
        tcg_gen_movi_i64(t1, 0);                                \
        set_cpu_vsr(xT(ctx->opcode), t1, true);                 \
        set_cpu_vsr(xT(ctx->opcode), t1, false);                \
        return;                                                 \
    }                                                           \
    tcg_gen_movi_i32(t0, uimm);                                 \
    gen_helper_##name(cpu_env, xt, xb, t0);                     \
    tcg_temp_free_ptr(xb);                                      \
    tcg_temp_free_ptr(xt);                                      \
    tcg_temp_free_i32(t0);                                      \
    tcg_temp_free_i64(t1);                                      \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)
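
/*
 * The scalar exponent/significand extract and insert instructions
 * move values between VSRs and GPRs, so they are only built for
 * 64-bit targets.
 */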
#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xB(ctx->opcode), true);
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsr(xah, rA(ctx->opcode) + 32, true);
    get_cpu_vsr(xal, rA(ctx->opcode) + 32, false);
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif
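
/*
 * xviexpsp replaces the exponent of every single-precision element
 * of VSR[XA] with the low 8 bits of the corresponding element of
 * VSR[XB]: the 0x807FFFFF mask keeps sign and fraction, and the new
 * exponents are shifted into place at bit 23 of each word.
 */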
static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
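
/*
 * Common translation for LXV/STXV and their paired, indexed and
 * prefixed forms.  The "high" doubleword of a VSR is selected with
 * !ctx->le_mode so memory order matches the ISA definition, and a
 * paired access in little-endian mode also swaps the order of the
 * two target registers.
 */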
static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ,
                     int rt, bool store, bool paired)
{
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;
    int rt1, rt2;

    xt = tcg_temp_new_i64();

    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (paired && ctx->le_mode) {
        rt1 = rt + 1;
        rt2 = rt;
    } else {
        rt1 = rt;
        rt2 = rt + 1;
    }

    if (store) {
        get_cpu_vsr(xt, rt1, !ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        gen_addr_add(ctx, ea, ea, 8);
        get_cpu_vsr(xt, rt1, ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, !ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        }
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, !ctx->le_mode);
        gen_addr_add(ctx, ea, ea, 8);
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, ctx->le_mode);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, !ctx->le_mode);
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, ctx->le_mode);
        }
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}
static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired)
{
    if (paired) {
        REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    } else {
        REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    }

    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired);
}

static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a,
                           bool store, bool paired)
{
    arg_D d;
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired);
}

static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired)
{
    if (paired) {
        REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    } else {
        REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    }

    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired);
}

TRANS(STXV, do_lstxv_D, true, false)
TRANS(LXV, do_lstxv_D, false, false)
TRANS(STXVP, do_lstxv_D, true, true)
TRANS(LXVP, do_lstxv_D, false, true)
TRANS(STXVX, do_lstxv_X, true, false)
TRANS(LXVX, do_lstxv_X, false, false)
TRANS(STXVPX, do_lstxv_X, true, true)
TRANS(LXVPX, do_lstxv_X, false, true)
TRANS64(PSTXV, do_lstxv_PLS_D, true, false)
TRANS64(PLXV, do_lstxv_PLS_D, false, false)
TRANS64(PSTXVP, do_lstxv_PLS_D, true, true)
TRANS64(PLXVP, do_lstxv_PLS_D, false, true)
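
/*
 * XXBLENDV expands to a sign-bit select: each selector element of
 * VSR[XC] is arithmetic-shifted so its sign bit fills the element,
 * then bitsel picks the VSR[XB] element where the mask is set and
 * the VSR[XA] element where it is clear.
 */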
static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                             TCGv_vec c)
{
    TCGv_vec tmp = tcg_temp_new_vec_matching(c);
    tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1);
    tcg_gen_bitsel_vec(vece, t, tmp, b, a);
    tcg_temp_free_vec(tmp);
}

static bool do_xxblendv(DisasContext *ctx, arg_XX4 *a, unsigned vece)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_sari_vec, 0
    };
    static const GVecGen4 ops[4] = {
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVB,
            .opt_opc = vecop_list,
            .vece = MO_8
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVH,
            .opt_opc = vecop_list,
            .vece = MO_16
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVW,
            .opt_opc = vecop_list,
            .vece = MO_32
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVD,
            .opt_opc = vecop_list,
            .vece = MO_64
        }
    };

    REQUIRE_VSX(ctx);

    tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
                   vsr_full_offset(a->xb), vsr_full_offset(a->xc),
                   16, 16, &ops[vece]);

    return true;
}

TRANS(XXBLENDVB, do_xxblendv, MO_8)
TRANS(XXBLENDVH, do_xxblendv, MO_16)
TRANS(XXBLENDVW, do_xxblendv, MO_32)
TRANS(XXBLENDVD, do_xxblendv, MO_64)
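
/*
 * ISA 3.0 scalar "type-C" and "type-J" maximum/minimum.  All four
 * instructions use the same translation shell and differ only in
 * the out-of-line helper they dispatch to.
 */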
static bool do_xsmaxmincjdp(DisasContext *ctx, arg_XX3 *a,
                            void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    helper(cpu_env, xt, xa, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSMAXCDP, do_xsmaxmincjdp, gen_helper_xsmaxcdp)
TRANS(XSMINCDP, do_xsmaxmincjdp, gen_helper_xsmincdp)
TRANS(XSMAXJDP, do_xsmaxmincjdp, gen_helper_xsmaxjdp)
TRANS(XSMINJDP, do_xsmaxmincjdp, gen_helper_xsminjdp)

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL