/* target/ppc/translate/vsx-impl.c.inc */
/***                           VSX extension                               ***/

static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high));
}

static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}

static inline TCGv_ptr gen_acc_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, acc_full_offset(reg));
    return r;
}

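/*
 * Scalar indexed loads: compute EA from (RA|0) + RB, load a single value
 * with the given gen_qemu_* operation into the high doubleword of VSR[XT].
 * The low doubleword is not written (see the NOTE below).
 */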
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsr(xT(ctx->opcode), t0, true);                   \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

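/*
 * lxvd2x: load VSR[XT] as two 8-byte elements; gen_qemu_ld64_i64 applies
 * the guest's current byte order to each doubleword.
 */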
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

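/*
 * lxvw4x: load four words. In little-endian mode each doubleword is
 * loaded LE and its two words are then swapped back with shift/deposit,
 * so the four word elements end up in the register order the ISA
 * specifies.
 */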
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

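/*
 * lxvwsx: load one word and splat it to all four word elements of
 * VSR[XT] with a gvec dup. Targets in the upper half of the register
 * file are Altivec registers, hence the VPU (rather than VSXU)
 * availability check.
 */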
static void gen_lxvwsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i32 data;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i32(data);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 data;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UQ));
    tcg_gen_gvec_dup_i64(MO_UQ, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i64(data);
}

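/*
 * Byte-swap each of the eight halfwords (or, below, each of the four
 * words) spread across two input doublewords; used to fix up element
 * order for little-endian guests.
 */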
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

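/*
 * Variable-length vector load/store (lxvl, lxvll, stxvl, stxvll): the
 * byte count comes from RB, so the access is done in a helper rather
 * than in inline TCG. EA here is just (RA|0), from gen_addr_register.
 */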
#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

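/*
 * Scalar indexed stores: mirror of VSX_LOAD_SCALAR above, storing the
 * high doubleword of VSR[XS] with the given gen_qemu_* operation.
 */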
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsr(t0, xS(ctx->opcode), true);                   \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEUQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

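/*
 * Moves between GPRs and VSRs. The low half of the VSR file aliases the
 * FPRs and the high half the Altivec registers, so the facility check
 * depends on which register is addressed.
 */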
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsr(xT(ctx->opcode), t0, true);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}
#endif

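/*
 * Sign-bit manipulation (abs, nabs, neg, cpsgn) is done with plain
 * logical operations against the sign mask; no FP status flags are
 * touched.
 */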
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xb, xB(ctx->opcode), true);                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                get_cpu_vsr(xa, xA(ctx->opcode), true);           \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xb, true);                   \
        set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xbh, xb, true);                                   \
    get_cpu_vsr(xbl, xb, false);                                  \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsr(tmp, xa, true);                               \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsr(xt, xbh, true);                                   \
    set_cpu_vsr(xt, xbl, false);                                  \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext *ctx)                  \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        get_cpu_vsr(xbh, xB(ctx->opcode), true);                 \
        get_cpu_vsr(xbl, xB(ctx->opcode), false);                \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                get_cpu_vsr(xah, xA(ctx->opcode), true);         \
                get_cpu_vsr(xal, xA(ctx->opcode), false);        \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        set_cpu_vsr(xT(ctx->opcode), xbh, true);                 \
        set_cpu_vsr(xT(ctx->opcode), xbl, false);                \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

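/*
 * Vector FP compares. When opcode bit 21 (the record bit) is set, the
 * helper's all-true/all-false summary is written to CR6; otherwise the
 * result value is discarded.
 */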
#define VSX_CMP(name, op1, op2, inval, type)                                  \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 ignored;                                                         \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);                   \
    } else {                                                                  \
        ignored = tcg_temp_new_i32();                                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);                      \
        tcg_temp_free_i32(ignored);                                           \
    }                                                                         \
    gen_helper_float_check_status(cpu_env);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)

static bool trans_XSCVQPDP(DisasContext *ctx, arg_X_tb_rc *a)
{
    TCGv_i32 ro;
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    ro = tcg_const_i32(a->rc);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper_XSCVQPDP(cpu_env, ro, xt, xb);
    tcg_temp_free_i32(ro);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool do_helper_env_X_tb(DisasContext *ctx, arg_X_tb *a,
                               void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper(cpu_env, xt, xb);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSCVUQQP, do_helper_env_X_tb, gen_helper_XSCVUQQP)
TRANS(XSCVSQQP, do_helper_env_X_tb, gen_helper_XSCVSQQP)
TRANS(XSCVQPUQZ, do_helper_env_X_tb, gen_helper_XSCVQPUQZ)
TRANS(XSCVQPSQZ, do_helper_env_X_tb, gen_helper_XSCVQPSQZ)

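/*
 * Boilerplate generators for helper-based VSX ops. The suffix encodes
 * what is passed to the helper: X3 = xt/xa/xb pointers, X2 = xt/xb,
 * *_AB = sources only, X1 = xb only; the R* variants address the upper
 * (Altivec) half of the register file, as used by the quad-precision
 * ops; the plain "2" form just forwards the raw opcode.
 */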
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xb);                                       \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xb;                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xb);                                      \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                              \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsr(t0, xB(ctx->opcode), true);                   \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsr(xT(ctx->opcode), t1, true);                   \
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)

bool trans_XSCVSPDPN(DisasContext *ctx, arg_XX2 *a)
{
    TCGv_i64 tmp;

    REQUIRE_INSNS_FLAGS2(ctx, VSX207);
    REQUIRE_VSX(ctx);

    tmp = tcg_temp_new_i64();
    get_cpu_vsr(tmp, a->xb, true);

    gen_helper_XSCVSPDPN(tmp, tmp);

    set_cpu_vsr(a->xt, tmp, true);
    set_cpu_vsr(a->xt, tcg_constant_i64(0), false);

    tcg_temp_free_i64(tmp);

    return true;
}

GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)

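/*
 * xxperm/xxpermr reuse the vperm/vpermr helpers: XT is both destination
 * and second source, with XB providing the permute control vector.
 */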
static bool trans_XXPERM(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERM(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XXPERMR(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERMR(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XXPERMDI(DisasContext *ctx, arg_XX3_dm *a)
{
    TCGv_i64 t0, t1;

    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    t0 = tcg_temp_new_i64();

    if (unlikely(a->xt == a->xa || a->xt == a->xb)) {
        t1 = tcg_temp_new_i64();

        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        get_cpu_vsr(t1, a->xb, (a->dm & 1) == 0);

        set_cpu_vsr(a->xt, t0, true);
        set_cpu_vsr(a->xt, t1, false);

        tcg_temp_free_i64(t1);
    } else {
        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        set_cpu_vsr(a->xt, t0, true);

        get_cpu_vsr(t0, a->xb, (a->dm & 1) == 0);
        set_cpu_vsr(a->xt, t0, false);
    }

    tcg_temp_free_i64(t0);

    return true;
}

static bool trans_XXPERMX(DisasContext *ctx, arg_8RR_XX4_uim3 *a)
{
    TCGv_ptr xt, xa, xb, xc;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);
    xc = gen_vsr_ptr(a->xc);

    gen_helper_XXPERMX(xt, xa, xb, xc, tcg_constant_tl(a->uim3));

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);
    tcg_temp_free_ptr(xc);

    return true;
}

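/*
 * xxgenpcv*: the IMM field selects one of four helpers (big/little
 * endian x expanded/compressed result); values above 3 are invalid.
 */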
typedef void (*xxgenpcv_genfn)(TCGv_ptr, TCGv_ptr);

static bool do_xxgenpcv(DisasContext *ctx, arg_X_imm5 *a,
                        const xxgenpcv_genfn fn[4])
{
    TCGv_ptr xt, vrb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (a->imm & ~0x3) {
        gen_invalid(ctx);
        return true;
    }

    xt = gen_vsr_ptr(a->xt);
    vrb = gen_avr_ptr(a->vrb);

    fn[a->imm](xt, vrb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(vrb);

    return true;
}

#define XXGENPCV(NAME) \
    static bool trans_##NAME(DisasContext *ctx, arg_X_imm5 *a)  \
    {                                                           \
        static const xxgenpcv_genfn fn[4] = {                   \
            gen_helper_##NAME##_be_exp,                         \
            gen_helper_##NAME##_be_comp,                        \
            gen_helper_##NAME##_le_exp,                         \
            gen_helper_##NAME##_le_comp,                        \
        };                                                      \
        return do_xxgenpcv(ctx, a, fn);                         \
    }

XXGENPCV(XXGENPCVBM)
XXGENPCV(XXGENPCVHM)
XXGENPCV(XXGENPCVWM)
XXGENPCV(XXGENPCVDM)
#undef XXGENPCV

1300 static bool do_xsmadd(DisasContext *ctx, int tgt, int src1, int src2, int src3,
1301         void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
1303     TCGv_ptr t, s1, s2, s3;
1305     t = gen_vsr_ptr(tgt);
1306     s1 = gen_vsr_ptr(src1);
1307     s2 = gen_vsr_ptr(src2);
1308     s3 = gen_vsr_ptr(src3);
1310     gen_helper(cpu_env, t, s1, s2, s3);
1312     tcg_temp_free_ptr(t);
1313     tcg_temp_free_ptr(s1);
1314     tcg_temp_free_ptr(s2);
1315     tcg_temp_free_ptr(s3);
1317     return true;
1320 static bool do_xsmadd_XX3(DisasContext *ctx, arg_XX3 *a, bool type_a,
1321         void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
1323     REQUIRE_VSX(ctx);
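    /*
     * Assuming the helpers follow the softfloat muladd(s1, s3, s2) operand
     * order, the A-form computes xt = xa * xb + xt and the M-form computes
     * xt = xa * xt + xb.
     */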
1325     if (type_a) {
1326         return do_xsmadd(ctx, a->xt, a->xa, a->xt, a->xb, gen_helper);
1327     }
1328     return do_xsmadd(ctx, a->xt, a->xa, a->xb, a->xt, gen_helper);
1331 TRANS_FLAGS2(VSX, XSMADDADP, do_xsmadd_XX3, true, gen_helper_XSMADDDP)
1332 TRANS_FLAGS2(VSX, XSMADDMDP, do_xsmadd_XX3, false, gen_helper_XSMADDDP)
1333 TRANS_FLAGS2(VSX, XSMSUBADP, do_xsmadd_XX3, true, gen_helper_XSMSUBDP)
1334 TRANS_FLAGS2(VSX, XSMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSMSUBDP)
1335 TRANS_FLAGS2(VSX, XSNMADDADP, do_xsmadd_XX3, true, gen_helper_XSNMADDDP)
1336 TRANS_FLAGS2(VSX, XSNMADDMDP, do_xsmadd_XX3, false, gen_helper_XSNMADDDP)
1337 TRANS_FLAGS2(VSX, XSNMSUBADP, do_xsmadd_XX3, true, gen_helper_XSNMSUBDP)
1338 TRANS_FLAGS2(VSX, XSNMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSNMSUBDP)
1339 TRANS_FLAGS2(VSX207, XSMADDASP, do_xsmadd_XX3, true, gen_helper_XSMADDSP)
1340 TRANS_FLAGS2(VSX207, XSMADDMSP, do_xsmadd_XX3, false, gen_helper_XSMADDSP)
1341 TRANS_FLAGS2(VSX207, XSMSUBASP, do_xsmadd_XX3, true, gen_helper_XSMSUBSP)
1342 TRANS_FLAGS2(VSX207, XSMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSMSUBSP)
1343 TRANS_FLAGS2(VSX207, XSNMADDASP, do_xsmadd_XX3, true, gen_helper_XSNMADDSP)
1344 TRANS_FLAGS2(VSX207, XSNMADDMSP, do_xsmadd_XX3, false, gen_helper_XSNMADDSP)
1345 TRANS_FLAGS2(VSX207, XSNMSUBASP, do_xsmadd_XX3, true, gen_helper_XSNMSUBSP)
1346 TRANS_FLAGS2(VSX207, XSNMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSNMSUBSP)
1348 static bool do_xsmadd_X(DisasContext *ctx, arg_X_rc *a,
1349         void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr),
1350         void (*gen_helper_ro)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
1352     int vrt, vra, vrb;
1354     REQUIRE_INSNS_FLAGS2(ctx, ISA300);
1355     REQUIRE_VSX(ctx);
1357     vrt = a->rt + 32;
1358     vra = a->ra + 32;
1359     vrb = a->rb + 32;
1361     if (a->rc) {
1362         return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper_ro);
1363     }
1365     return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper);
1368 TRANS(XSMADDQP, do_xsmadd_X, gen_helper_XSMADDQP, gen_helper_XSMADDQPO)
1369 TRANS(XSMSUBQP, do_xsmadd_X, gen_helper_XSMSUBQP, gen_helper_XSMSUBQPO)
1370 TRANS(XSNMADDQP, do_xsmadd_X, gen_helper_XSNMADDQP, gen_helper_XSNMADDQPO)
1371 TRANS(XSNMSUBQP, do_xsmadd_X, gen_helper_XSNMSUBQP, gen_helper_XSNMSUBQPO)
1373 #define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)             \
1374 static void gen_##name(DisasContext *ctx)                                     \
1375 {                                                                             \
1376     TCGv_ptr xt, s1, s2, s3;                                                  \
1377     if (unlikely(!ctx->vsx_enabled)) {                                        \
1378         gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
1379         return;                                                               \
1380     }                                                                         \
1381     xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
1382     s1 = gen_vsr_ptr(xA(ctx->opcode));                                        \
1383     if (ctx->opcode & PPC_BIT32(25)) {                                        \
1384         /*                                                                    \
1385          * AxT + B                                                            \
1386          */                                                                   \
1387         s2 = gen_vsr_ptr(xB(ctx->opcode));                                    \
1388         s3 = gen_vsr_ptr(xT(ctx->opcode));                                    \
1389     } else {                                                                  \
1390         /*                                                                    \
1391          * AxB + T                                                            \
1392          */                                                                   \
1393         s2 = gen_vsr_ptr(xT(ctx->opcode));                                    \
1394         s3 = gen_vsr_ptr(xB(ctx->opcode));                                    \
1395     }                                                                         \
1396     gen_helper_##name(cpu_env, xt, s1, s2, s3);                               \
1397     tcg_temp_free_ptr(xt);                                                    \
1398     tcg_temp_free_ptr(s1);                                                    \
1399     tcg_temp_free_ptr(s2);                                                    \
1400     tcg_temp_free_ptr(s3);                                                    \
1403 GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
1404 GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
1405 GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
1406 GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
1407 GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
1408 GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
1409 GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
1410 GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)
1412 static void gen_xxbrd(DisasContext *ctx)
1414     TCGv_i64 xth;
1415     TCGv_i64 xtl;
1416     TCGv_i64 xbh;
1417     TCGv_i64 xbl;
1419     if (unlikely(!ctx->vsx_enabled)) {
1420         gen_exception(ctx, POWERPC_EXCP_VSXU);
1421         return;
1422     }
1423     xth = tcg_temp_new_i64();
1424     xtl = tcg_temp_new_i64();
1425     xbh = tcg_temp_new_i64();
1426     xbl = tcg_temp_new_i64();
1427     get_cpu_vsr(xbh, xB(ctx->opcode), true);
1428     get_cpu_vsr(xbl, xB(ctx->opcode), false);
1430     tcg_gen_bswap64_i64(xth, xbh);
1431     tcg_gen_bswap64_i64(xtl, xbl);
1432     set_cpu_vsr(xT(ctx->opcode), xth, true);
1433     set_cpu_vsr(xT(ctx->opcode), xtl, false);
1435     tcg_temp_free_i64(xth);
1436     tcg_temp_free_i64(xtl);
1437     tcg_temp_free_i64(xbh);
1438     tcg_temp_free_i64(xbl);
1441 static void gen_xxbrh(DisasContext *ctx)
1443     TCGv_i64 xth;
1444     TCGv_i64 xtl;
1445     TCGv_i64 xbh;
1446     TCGv_i64 xbl;
1448     if (unlikely(!ctx->vsx_enabled)) {
1449         gen_exception(ctx, POWERPC_EXCP_VSXU);
1450         return;
1451     }
1452     xth = tcg_temp_new_i64();
1453     xtl = tcg_temp_new_i64();
1454     xbh = tcg_temp_new_i64();
1455     xbl = tcg_temp_new_i64();
1456     get_cpu_vsr(xbh, xB(ctx->opcode), true);
1457     get_cpu_vsr(xbl, xB(ctx->opcode), false);
1459     gen_bswap16x8(xth, xtl, xbh, xbl);
1460     set_cpu_vsr(xT(ctx->opcode), xth, true);
1461     set_cpu_vsr(xT(ctx->opcode), xtl, false);
1463     tcg_temp_free_i64(xth);
1464     tcg_temp_free_i64(xtl);
1465     tcg_temp_free_i64(xbh);
1466     tcg_temp_free_i64(xbl);
1469 static void gen_xxbrq(DisasContext *ctx)
1471     TCGv_i64 xth;
1472     TCGv_i64 xtl;
1473     TCGv_i64 xbh;
1474     TCGv_i64 xbl;
1475     TCGv_i64 t0;
1477     if (unlikely(!ctx->vsx_enabled)) {
1478         gen_exception(ctx, POWERPC_EXCP_VSXU);
1479         return;
1480     }
1481     xth = tcg_temp_new_i64();
1482     xtl = tcg_temp_new_i64();
1483     xbh = tcg_temp_new_i64();
1484     xbl = tcg_temp_new_i64();
1485     get_cpu_vsr(xbh, xB(ctx->opcode), true);
1486     get_cpu_vsr(xbl, xB(ctx->opcode), false);
1487     t0 = tcg_temp_new_i64();
1489     tcg_gen_bswap64_i64(t0, xbl);
1490     tcg_gen_bswap64_i64(xtl, xbh);
1491     set_cpu_vsr(xT(ctx->opcode), xtl, false);
1492     tcg_gen_mov_i64(xth, t0);
1493     set_cpu_vsr(xT(ctx->opcode), xth, true);
1495     tcg_temp_free_i64(t0);
1496     tcg_temp_free_i64(xth);
1497     tcg_temp_free_i64(xtl);
1498     tcg_temp_free_i64(xbh);
1499     tcg_temp_free_i64(xbl);
1502 static void gen_xxbrw(DisasContext *ctx)
1504     TCGv_i64 xth;
1505     TCGv_i64 xtl;
1506     TCGv_i64 xbh;
1507     TCGv_i64 xbl;
1509     if (unlikely(!ctx->vsx_enabled)) {
1510         gen_exception(ctx, POWERPC_EXCP_VSXU);
1511         return;
1512     }
1513     xth = tcg_temp_new_i64();
1514     xtl = tcg_temp_new_i64();
1515     xbh = tcg_temp_new_i64();
1516     xbl = tcg_temp_new_i64();
1517     get_cpu_vsr(xbh, xB(ctx->opcode), true);
1518     get_cpu_vsr(xbl, xB(ctx->opcode), false);
1520     gen_bswap32x4(xth, xtl, xbh, xbl);
1521     set_cpu_vsr(xT(ctx->opcode), xth, true);
1522     set_cpu_vsr(xT(ctx->opcode), xtl, false);
1524     tcg_temp_free_i64(xth);
1525     tcg_temp_free_i64(xtl);
1526     tcg_temp_free_i64(xbh);
1527     tcg_temp_free_i64(xbl);
1530 #define VSX_LOGICAL(name, vece, tcg_op)                              \
1531 static void glue(gen_, name)(DisasContext *ctx)                      \
1532     {                                                                \
1533         if (unlikely(!ctx->vsx_enabled)) {                           \
1534             gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
1535             return;                                                  \
1536         }                                                            \
1537         tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),               \
1538                vsr_full_offset(xA(ctx->opcode)),                     \
1539                vsr_full_offset(xB(ctx->opcode)), 16, 16);            \
1540     }
1542 VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
1543 VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
1544 VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
1545 VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
1546 VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
1547 VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
1548 VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
1549 VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
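/*
 * Each invocation above defines a gen_* translator that, after the usual
 * vsx_enabled check, emits a single gvec operation over the full 16-byte
 * VSRs.
 */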
1551 #define VSX_XXMRG(name, high)                               \
1552 static void glue(gen_, name)(DisasContext *ctx)             \
1553     {                                                       \
1554         TCGv_i64 a0, a1, b0, b1, tmp;                       \
1555         if (unlikely(!ctx->vsx_enabled)) {                  \
1556             gen_exception(ctx, POWERPC_EXCP_VSXU);          \
1557             return;                                         \
1558         }                                                   \
1559         a0 = tcg_temp_new_i64();                            \
1560         a1 = tcg_temp_new_i64();                            \
1561         b0 = tcg_temp_new_i64();                            \
1562         b1 = tcg_temp_new_i64();                            \
1563         tmp = tcg_temp_new_i64();                           \
1564         get_cpu_vsr(a0, xA(ctx->opcode), high);             \
1565         get_cpu_vsr(a1, xA(ctx->opcode), high);             \
1566         get_cpu_vsr(b0, xB(ctx->opcode), high);             \
1567         get_cpu_vsr(b1, xB(ctx->opcode), high);             \
1568         tcg_gen_shri_i64(a0, a0, 32);                       \
1569         tcg_gen_shri_i64(b0, b0, 32);                       \
1570         tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);           \
1571         set_cpu_vsr(xT(ctx->opcode), tmp, true);            \
1572         tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);           \
1573         set_cpu_vsr(xT(ctx->opcode), tmp, false);           \
1574         tcg_temp_free_i64(a0);                              \
1575         tcg_temp_free_i64(a1);                              \
1576         tcg_temp_free_i64(b0);                              \
1577         tcg_temp_free_i64(b1);                              \
1578         tcg_temp_free_i64(tmp);                             \
1579     }
1581 VSX_XXMRG(xxmrghw, 1)
1582 VSX_XXMRG(xxmrglw, 0)
1584 static bool trans_XXSEL(DisasContext *ctx, arg_XX4 *a)
1586     REQUIRE_INSNS_FLAGS2(ctx, VSX);
1587     REQUIRE_VSX(ctx);
1589     tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(a->xt), vsr_full_offset(a->xc),
1590                         vsr_full_offset(a->xb), vsr_full_offset(a->xa), 16, 16);
1592     return true;
1595 static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2_uim *a)
1597     int tofs, bofs;
1599     REQUIRE_VSX(ctx);
1601     tofs = vsr_full_offset(a->xt);
1602     bofs = vsr_full_offset(a->xb);
1603     bofs += a->uim << MO_32;
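    /*
     * The VSR is stored in host element order, so on little-endian hosts
     * the byte offset of the selected word must be flipped.
     */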
1604 #if !HOST_BIG_ENDIAN
1605     bofs ^= 8 | 4;
1606 #endif
1608     tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
1609     return true;
1612 #define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
1614 static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a)
1616     if (a->xt < 32) {
1617         REQUIRE_VSX(ctx);
1618     } else {
1619         REQUIRE_VECTOR(ctx);
1620     }
1621     tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm);
1622     return true;
1625 static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a)
1627     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1628     REQUIRE_VSX(ctx);
1630     tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si);
1632     return true;
1635 static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a)
1637     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1638     REQUIRE_VSX(ctx);
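    /*
     * The 32-bit immediate is a single-precision image; helper_todouble()
     * widens it to the corresponding double-precision bit pattern before
     * it is splatted to both doublewords.
     */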
1640     tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16,
1641                          helper_todouble(a->si));
1642     return true;
1645 static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a)
1647     TCGv_i32 imm;
1649     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1650     REQUIRE_VSX(ctx);
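    /*
     * ix selects which word of each doubleword is overwritten: words
     * 0 + ix and 2 + ix of the target VSR; the other two words are left
     * untouched.
     */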
1652     imm = tcg_constant_i32(a->si);
1654     tcg_gen_st_i32(imm, cpu_env,
1655         offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
1656     tcg_gen_st_i32(imm, cpu_env,
1657         offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix)));
1659     return true;
1662 static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a)
1664     static const uint64_t values[32] = {
1665         0, /* Unspecified */
1666         0x3FFF000000000000llu, /* QP +1.0 */
1667         0x4000000000000000llu, /* QP +2.0 */
1668         0x4000800000000000llu, /* QP +3.0 */
1669         0x4001000000000000llu, /* QP +4.0 */
1670         0x4001400000000000llu, /* QP +5.0 */
1671         0x4001800000000000llu, /* QP +6.0 */
1672         0x4001C00000000000llu, /* QP +7.0 */
1673         0x7FFF000000000000llu, /* QP +Inf */
1674         0x7FFF800000000000llu, /* QP dQNaN */
1675         0, /* Unspecified */
1676         0, /* Unspecified */
1677         0, /* Unspecified */
1678         0, /* Unspecified */
1679         0, /* Unspecified */
1680         0, /* Unspecified */
1681         0x8000000000000000llu, /* QP -0.0 */
1682         0xBFFF000000000000llu, /* QP -1.0 */
1683         0xC000000000000000llu, /* QP -2.0 */
1684         0xC000800000000000llu, /* QP -3.0 */
1685         0xC001000000000000llu, /* QP -4.0 */
1686         0xC001400000000000llu, /* QP -5.0 */
1687         0xC001800000000000llu, /* QP -6.0 */
1688         0xC001C00000000000llu, /* QP -7.0 */
1689         0xFFFF000000000000llu, /* QP -Inf */
1690     };
1692     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1693     REQUIRE_VSX(ctx);
1695     if (values[a->uim]) {
1696         set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
1697         set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true);
1698     } else {
1699         gen_invalid(ctx);
1700     }
1702     return true;
1705 static bool trans_XVTLSBB(DisasContext *ctx, arg_XX2_bf_xb *a)
1707     TCGv_i64 xb, t0, t1, all_true, all_false, mask, zero;
1709     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
1710     REQUIRE_VSX(ctx);
1712     xb = tcg_temp_new_i64();
1713     t0 = tcg_temp_new_i64();
1714     t1 = tcg_temp_new_i64();
1715     all_true = tcg_temp_new_i64();
1716     all_false = tcg_temp_new_i64();
1717     mask = tcg_constant_i64(dup_const(MO_8, 1));
1718     zero = tcg_constant_i64(0);
1720     get_cpu_vsr(xb, a->xb, true);
1721     tcg_gen_and_i64(t0, mask, xb);
1722     get_cpu_vsr(xb, a->xb, false);
1723     tcg_gen_and_i64(t1, mask, xb);
1725     tcg_gen_or_i64(all_false, t0, t1);
1726     tcg_gen_and_i64(all_true, t0, t1);
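    /*
     * CR[bf] gets LT (bit 3) when every byte has its least significant bit
     * set and EQ (bit 1) when no byte does; the shifts below place the two
     * results in those positions.
     */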
1728     tcg_gen_setcond_i64(TCG_COND_EQ, all_false, all_false, zero);
1729     tcg_gen_shli_i64(all_false, all_false, 1);
1730     tcg_gen_setcond_i64(TCG_COND_EQ, all_true, all_true, mask);
1731     tcg_gen_shli_i64(all_true, all_true, 3);
1733     tcg_gen_or_i64(t0, all_false, all_true);
1734     tcg_gen_extrl_i64_i32(cpu_crf[a->bf], t0);
1736     tcg_temp_free_i64(xb);
1737     tcg_temp_free_i64(t0);
1738     tcg_temp_free_i64(t1);
1739     tcg_temp_free_i64(all_true);
1740     tcg_temp_free_i64(all_false);
1742     return true;
1745 static void gen_xxsldwi(DisasContext *ctx)
1747     TCGv_i64 xth, xtl;
1748     if (unlikely(!ctx->vsx_enabled)) {
1749         gen_exception(ctx, POWERPC_EXCP_VSXU);
1750         return;
1751     }
1752     xth = tcg_temp_new_i64();
1753     xtl = tcg_temp_new_i64();
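    /*
     * Conceptually concatenate xA:xB and select the four words starting at
     * word index SHW.
     */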
1755     switch (SHW(ctx->opcode)) {
1756         case 0: {
1757             get_cpu_vsr(xth, xA(ctx->opcode), true);
1758             get_cpu_vsr(xtl, xA(ctx->opcode), false);
1759             break;
1760         }
1761         case 1: {
1762             TCGv_i64 t0 = tcg_temp_new_i64();
1763             get_cpu_vsr(xth, xA(ctx->opcode), true);
1764             tcg_gen_shli_i64(xth, xth, 32);
1765             get_cpu_vsr(t0, xA(ctx->opcode), false);
1766             tcg_gen_shri_i64(t0, t0, 32);
1767             tcg_gen_or_i64(xth, xth, t0);
1768             get_cpu_vsr(xtl, xA(ctx->opcode), false);
1769             tcg_gen_shli_i64(xtl, xtl, 32);
1770             get_cpu_vsr(t0, xB(ctx->opcode), true);
1771             tcg_gen_shri_i64(t0, t0, 32);
1772             tcg_gen_or_i64(xtl, xtl, t0);
1773             tcg_temp_free_i64(t0);
1774             break;
1775         }
1776         case 2: {
1777             get_cpu_vsr(xth, xA(ctx->opcode), false);
1778             get_cpu_vsr(xtl, xB(ctx->opcode), true);
1779             break;
1780         }
1781         case 3: {
1782             TCGv_i64 t0 = tcg_temp_new_i64();
1783             get_cpu_vsr(xth, xA(ctx->opcode), false);
1784             tcg_gen_shli_i64(xth, xth, 32);
1785             get_cpu_vsr(t0, xB(ctx->opcode), true);
1786             tcg_gen_shri_i64(t0, t0, 32);
1787             tcg_gen_or_i64(xth, xth, t0);
1788             get_cpu_vsr(xtl, xB(ctx->opcode), true);
1789             tcg_gen_shli_i64(xtl, xtl, 32);
1790             get_cpu_vsr(t0, xB(ctx->opcode), false);
1791             tcg_gen_shri_i64(t0, t0, 32);
1792             tcg_gen_or_i64(xtl, xtl, t0);
1793             tcg_temp_free_i64(t0);
1794             break;
1795         }
1796     }
1798     set_cpu_vsr(xT(ctx->opcode), xth, true);
1799     set_cpu_vsr(xT(ctx->opcode), xtl, false);
1801     tcg_temp_free_i64(xth);
1802     tcg_temp_free_i64(xtl);
1805 static bool do_vsx_extract_insert(DisasContext *ctx, arg_XX2_uim *a,
1806     void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_i32))
1808     TCGv_i64 zero = tcg_constant_i64(0);
1809     TCGv_ptr xt, xb;
1811     REQUIRE_INSNS_FLAGS2(ctx, ISA300);
1812     REQUIRE_VSX(ctx);
1814     /*
1815      * uim > 15 is out of bounds and zeroes the target; for uim > 12 the
1816      * partially out-of-bounds access is handled in the helper, as on hardware.
1817      */
1818     if (a->uim > 15) {
1819         set_cpu_vsr(a->xt, zero, true);
1820         set_cpu_vsr(a->xt, zero, false);
1821     } else {
1822         xt = gen_vsr_ptr(a->xt);
1823         xb = gen_vsr_ptr(a->xb);
1824         gen_helper(xt, xb, tcg_constant_i32(a->uim));
1825         tcg_temp_free_ptr(xb);
1826         tcg_temp_free_ptr(xt);
1827     }
1829     return true;
1832 TRANS(XXEXTRACTUW, do_vsx_extract_insert, gen_helper_XXEXTRACTUW)
1833 TRANS(XXINSERTW, do_vsx_extract_insert, gen_helper_XXINSERTW)
1835 #ifdef TARGET_PPC64
1836 static void gen_xsxexpdp(DisasContext *ctx)
1838     TCGv rt = cpu_gpr[rD(ctx->opcode)];
1839     TCGv_i64 t0;
1840     if (unlikely(!ctx->vsx_enabled)) {
1841         gen_exception(ctx, POWERPC_EXCP_VSXU);
1842         return;
1843     }
1844     t0 = tcg_temp_new_i64();
1845     get_cpu_vsr(t0, xB(ctx->opcode), true);
1846     tcg_gen_extract_i64(rt, t0, 52, 11);
1847     tcg_temp_free_i64(t0);
1850 static void gen_xsxexpqp(DisasContext *ctx)
1852     TCGv_i64 xth;
1853     TCGv_i64 xtl;
1854     TCGv_i64 xbh;
1856     if (unlikely(!ctx->vsx_enabled)) {
1857         gen_exception(ctx, POWERPC_EXCP_VSXU);
1858         return;
1859     }
1860     xth = tcg_temp_new_i64();
1861     xtl = tcg_temp_new_i64();
1862     xbh = tcg_temp_new_i64();
1863     get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
1865     tcg_gen_extract_i64(xth, xbh, 48, 15);
1866     set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
1867     tcg_gen_movi_i64(xtl, 0);
1868     set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);
1870     tcg_temp_free_i64(xbh);
1871     tcg_temp_free_i64(xth);
1872     tcg_temp_free_i64(xtl);
1875 static void gen_xsiexpdp(DisasContext *ctx)
1877     TCGv_i64 xth;
1878     TCGv ra = cpu_gpr[rA(ctx->opcode)];
1879     TCGv rb = cpu_gpr[rB(ctx->opcode)];
1880     TCGv_i64 t0;
1882     if (unlikely(!ctx->vsx_enabled)) {
1883         gen_exception(ctx, POWERPC_EXCP_VSXU);
1884         return;
1885     }
1886     t0 = tcg_temp_new_i64();
1887     xth = tcg_temp_new_i64();
1888     tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
1889     tcg_gen_andi_i64(t0, rb, 0x7FF);
1890     tcg_gen_shli_i64(t0, t0, 52);
1891     tcg_gen_or_i64(xth, xth, t0);
1892     set_cpu_vsr(xT(ctx->opcode), xth, true);
1893     set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);
1894     tcg_temp_free_i64(t0);
1895     tcg_temp_free_i64(xth);
1898 static void gen_xsiexpqp(DisasContext *ctx)
1900     TCGv_i64 xth;
1901     TCGv_i64 xtl;
1902     TCGv_i64 xah;
1903     TCGv_i64 xal;
1904     TCGv_i64 xbh;
1905     TCGv_i64 t0;
1907     if (unlikely(!ctx->vsx_enabled)) {
1908         gen_exception(ctx, POWERPC_EXCP_VSXU);
1909         return;
1910     }
1911     xth = tcg_temp_new_i64();
1912     xtl = tcg_temp_new_i64();
1913     xah = tcg_temp_new_i64();
1914     xal = tcg_temp_new_i64();
1915     get_cpu_vsr(xah, rA(ctx->opcode) + 32, true);
1916     get_cpu_vsr(xal, rA(ctx->opcode) + 32, false);
1917     xbh = tcg_temp_new_i64();
1918     get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
1919     t0 = tcg_temp_new_i64();
1921     tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
1922     tcg_gen_andi_i64(t0, xbh, 0x7FFF);
1923     tcg_gen_shli_i64(t0, t0, 48);
1924     tcg_gen_or_i64(xth, xth, t0);
1925     set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
1926     tcg_gen_mov_i64(xtl, xal);
1927     set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);
1929     tcg_temp_free_i64(t0);
1930     tcg_temp_free_i64(xth);
1931     tcg_temp_free_i64(xtl);
1932     tcg_temp_free_i64(xah);
1933     tcg_temp_free_i64(xal);
1934     tcg_temp_free_i64(xbh);
1937 static void gen_xsxsigdp(DisasContext *ctx)
1939     TCGv rt = cpu_gpr[rD(ctx->opcode)];
1940     TCGv_i64 t0, t1, zr, nan, exp;
1942     if (unlikely(!ctx->vsx_enabled)) {
1943         gen_exception(ctx, POWERPC_EXCP_VSXU);
1944         return;
1945     }
1946     exp = tcg_temp_new_i64();
1947     t0 = tcg_temp_new_i64();
1948     t1 = tcg_temp_new_i64();
1949     zr = tcg_const_i64(0);
1950     nan = tcg_const_i64(2047);
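    /*
     * The implicit bit (2^52) is inserted unless the exponent is 0 (zero
     * or denormal) or 2047 (infinity or NaN).
     */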
1952     get_cpu_vsr(t1, xB(ctx->opcode), true);
1953     tcg_gen_extract_i64(exp, t1, 52, 11);
1954     tcg_gen_movi_i64(t0, 0x0010000000000000);
1955     tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1956     tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1957     get_cpu_vsr(t1, xB(ctx->opcode), true);
1958     tcg_gen_deposit_i64(rt, t0, t1, 0, 52);
1960     tcg_temp_free_i64(t0);
1961     tcg_temp_free_i64(t1);
1962     tcg_temp_free_i64(exp);
1963     tcg_temp_free_i64(zr);
1964     tcg_temp_free_i64(nan);
1967 static void gen_xsxsigqp(DisasContext *ctx)
1969     TCGv_i64 t0, zr, nan, exp;
1970     TCGv_i64 xth;
1971     TCGv_i64 xtl;
1972     TCGv_i64 xbh;
1973     TCGv_i64 xbl;
1975     if (unlikely(!ctx->vsx_enabled)) {
1976         gen_exception(ctx, POWERPC_EXCP_VSXU);
1977         return;
1978     }
1979     xth = tcg_temp_new_i64();
1980     xtl = tcg_temp_new_i64();
1981     xbh = tcg_temp_new_i64();
1982     xbl = tcg_temp_new_i64();
1983     get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
1984     get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false);
1985     exp = tcg_temp_new_i64();
1986     t0 = tcg_temp_new_i64();
1987     zr = tcg_const_i64(0);
1988     nan = tcg_const_i64(32767);
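    /*
     * As in gen_xsxsigdp, but for quad precision: the implicit bit is 2^48
     * within the high doubleword and the exponent field is 15 bits wide.
     */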
1990     tcg_gen_extract_i64(exp, xbh, 48, 15);
1991     tcg_gen_movi_i64(t0, 0x0001000000000000);
1992     tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1993     tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1994     tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
1995     set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
1996     tcg_gen_mov_i64(xtl, xbl);
1997     set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);
1999     tcg_temp_free_i64(t0);
2000     tcg_temp_free_i64(exp);
2001     tcg_temp_free_i64(zr);
2002     tcg_temp_free_i64(nan);
2003     tcg_temp_free_i64(xth);
2004     tcg_temp_free_i64(xtl);
2005     tcg_temp_free_i64(xbh);
2006     tcg_temp_free_i64(xbl);
2008 #endif
2010 static void gen_xviexpsp(DisasContext *ctx)
2012     TCGv_i64 xth;
2013     TCGv_i64 xtl;
2014     TCGv_i64 xah;
2015     TCGv_i64 xal;
2016     TCGv_i64 xbh;
2017     TCGv_i64 xbl;
2018     TCGv_i64 t0;
2020     if (unlikely(!ctx->vsx_enabled)) {
2021         gen_exception(ctx, POWERPC_EXCP_VSXU);
2022         return;
2023     }
2024     xth = tcg_temp_new_i64();
2025     xtl = tcg_temp_new_i64();
2026     xah = tcg_temp_new_i64();
2027     xal = tcg_temp_new_i64();
2028     xbh = tcg_temp_new_i64();
2029     xbl = tcg_temp_new_i64();
2030     get_cpu_vsr(xah, xA(ctx->opcode), true);
2031     get_cpu_vsr(xal, xA(ctx->opcode), false);
2032     get_cpu_vsr(xbh, xB(ctx->opcode), true);
2033     get_cpu_vsr(xbl, xB(ctx->opcode), false);
2034     t0 = tcg_temp_new_i64();
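    /*
     * Each doubleword holds two single-precision values, so the masks and
     * the 23-bit shift update both exponent fields at once.
     */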
2036     tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
2037     tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
2038     tcg_gen_shli_i64(t0, t0, 23);
2039     tcg_gen_or_i64(xth, xth, t0);
2040     set_cpu_vsr(xT(ctx->opcode), xth, true);
2041     tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
2042     tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
2043     tcg_gen_shli_i64(t0, t0, 23);
2044     tcg_gen_or_i64(xtl, xtl, t0);
2045     set_cpu_vsr(xT(ctx->opcode), xtl, false);
2047     tcg_temp_free_i64(t0);
2048     tcg_temp_free_i64(xth);
2049     tcg_temp_free_i64(xtl);
2050     tcg_temp_free_i64(xah);
2051     tcg_temp_free_i64(xal);
2052     tcg_temp_free_i64(xbh);
2053     tcg_temp_free_i64(xbl);
2056 static void gen_xviexpdp(DisasContext *ctx)
2058     TCGv_i64 xth;
2059     TCGv_i64 xtl;
2060     TCGv_i64 xah;
2061     TCGv_i64 xal;
2062     TCGv_i64 xbh;
2063     TCGv_i64 xbl;
2065     if (unlikely(!ctx->vsx_enabled)) {
2066         gen_exception(ctx, POWERPC_EXCP_VSXU);
2067         return;
2068     }
2069     xth = tcg_temp_new_i64();
2070     xtl = tcg_temp_new_i64();
2071     xah = tcg_temp_new_i64();
2072     xal = tcg_temp_new_i64();
2073     xbh = tcg_temp_new_i64();
2074     xbl = tcg_temp_new_i64();
2075     get_cpu_vsr(xah, xA(ctx->opcode), true);
2076     get_cpu_vsr(xal, xA(ctx->opcode), false);
2077     get_cpu_vsr(xbh, xB(ctx->opcode), true);
2078     get_cpu_vsr(xbl, xB(ctx->opcode), false);
2080     tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
2081     set_cpu_vsr(xT(ctx->opcode), xth, true);
2083     tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
2084     set_cpu_vsr(xT(ctx->opcode), xtl, false);
2086     tcg_temp_free_i64(xth);
2087     tcg_temp_free_i64(xtl);
2088     tcg_temp_free_i64(xah);
2089     tcg_temp_free_i64(xal);
2090     tcg_temp_free_i64(xbh);
2091     tcg_temp_free_i64(xbl);
2094 static void gen_xvxexpsp(DisasContext *ctx)
2096     TCGv_i64 xth;
2097     TCGv_i64 xtl;
2098     TCGv_i64 xbh;
2099     TCGv_i64 xbl;
2101     if (unlikely(!ctx->vsx_enabled)) {
2102         gen_exception(ctx, POWERPC_EXCP_VSXU);
2103         return;
2104     }
2105     xth = tcg_temp_new_i64();
2106     xtl = tcg_temp_new_i64();
2107     xbh = tcg_temp_new_i64();
2108     xbl = tcg_temp_new_i64();
2109     get_cpu_vsr(xbh, xB(ctx->opcode), true);
2110     get_cpu_vsr(xbl, xB(ctx->opcode), false);
2112     tcg_gen_shri_i64(xth, xbh, 23);
2113     tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
2114     set_cpu_vsr(xT(ctx->opcode), xth, true);
2115     tcg_gen_shri_i64(xtl, xbl, 23);
2116     tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
2117     set_cpu_vsr(xT(ctx->opcode), xtl, false);
2119     tcg_temp_free_i64(xth);
2120     tcg_temp_free_i64(xtl);
2121     tcg_temp_free_i64(xbh);
2122     tcg_temp_free_i64(xbl);
2125 static void gen_xvxexpdp(DisasContext *ctx)
2127     TCGv_i64 xth;
2128     TCGv_i64 xtl;
2129     TCGv_i64 xbh;
2130     TCGv_i64 xbl;
2132     if (unlikely(!ctx->vsx_enabled)) {
2133         gen_exception(ctx, POWERPC_EXCP_VSXU);
2134         return;
2135     }
2136     xth = tcg_temp_new_i64();
2137     xtl = tcg_temp_new_i64();
2138     xbh = tcg_temp_new_i64();
2139     xbl = tcg_temp_new_i64();
2140     get_cpu_vsr(xbh, xB(ctx->opcode), true);
2141     get_cpu_vsr(xbl, xB(ctx->opcode), false);
2143     tcg_gen_extract_i64(xth, xbh, 52, 11);
2144     set_cpu_vsr(xT(ctx->opcode), xth, true);
2145     tcg_gen_extract_i64(xtl, xbl, 52, 11);
2146     set_cpu_vsr(xT(ctx->opcode), xtl, false);
2148     tcg_temp_free_i64(xth);
2149     tcg_temp_free_i64(xtl);
2150     tcg_temp_free_i64(xbh);
2151     tcg_temp_free_i64(xbl);
2154 static bool trans_XVXSIGSP(DisasContext *ctx, arg_XX2 *a)
2156     TCGv_ptr t, b;
2158     REQUIRE_INSNS_FLAGS2(ctx, ISA300);
2159     REQUIRE_VSX(ctx);
2161     t = gen_vsr_ptr(a->xt);
2162     b = gen_vsr_ptr(a->xb);
2164     gen_helper_XVXSIGSP(t, b);
2166     tcg_temp_free_ptr(t);
2167     tcg_temp_free_ptr(b);
2169     return true;
2172 static void gen_xvxsigdp(DisasContext *ctx)
2174     TCGv_i64 xth;
2175     TCGv_i64 xtl;
2176     TCGv_i64 xbh;
2177     TCGv_i64 xbl;
2178     TCGv_i64 t0, zr, nan, exp;
2180     if (unlikely(!ctx->vsx_enabled)) {
2181         gen_exception(ctx, POWERPC_EXCP_VSXU);
2182         return;
2183     }
2184     xth = tcg_temp_new_i64();
2185     xtl = tcg_temp_new_i64();
2186     xbh = tcg_temp_new_i64();
2187     xbl = tcg_temp_new_i64();
2188     get_cpu_vsr(xbh, xB(ctx->opcode), true);
2189     get_cpu_vsr(xbl, xB(ctx->opcode), false);
2190     exp = tcg_temp_new_i64();
2191     t0 = tcg_temp_new_i64();
2192     zr = tcg_const_i64(0);
2193     nan = tcg_const_i64(2047);
2195     tcg_gen_extract_i64(exp, xbh, 52, 11);
2196     tcg_gen_movi_i64(t0, 0x0010000000000000);
2197     tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2198     tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
2199     tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
2200     set_cpu_vsr(xT(ctx->opcode), xth, true);
2202     tcg_gen_extract_i64(exp, xbl, 52, 11);
2203     tcg_gen_movi_i64(t0, 0x0010000000000000);
2204     tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2205     tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
2206     tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
2207     set_cpu_vsr(xT(ctx->opcode), xtl, false);
2209     tcg_temp_free_i64(t0);
2210     tcg_temp_free_i64(exp);
2211     tcg_temp_free_i64(zr);
2212     tcg_temp_free_i64(nan);
2213     tcg_temp_free_i64(xth);
2214     tcg_temp_free_i64(xtl);
2215     tcg_temp_free_i64(xbh);
2216     tcg_temp_free_i64(xbl);
2219 static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ,
2220                      int rt, bool store, bool paired)
2222     TCGv ea;
2223     TCGv_i64 xt;
2224     MemOp mop;
2225     int rt1, rt2;
2227     xt = tcg_temp_new_i64();
2229     mop = DEF_MEMOP(MO_UQ);
2231     gen_set_access_type(ctx, ACCESS_INT);
2232     ea = do_ea_calc(ctx, ra, displ);
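    /*
     * For paired accesses in little-endian mode the two registers of the
     * pair are swapped so that the 32-byte storage image keeps the layout
     * the ISA specifies.
     */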
2234     if (paired && ctx->le_mode) {
2235         rt1 = rt + 1;
2236         rt2 = rt;
2237     } else {
2238         rt1 = rt;
2239         rt2 = rt + 1;
2240     }
2242     if (store) {
2243         get_cpu_vsr(xt, rt1, !ctx->le_mode);
2244         tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2245         gen_addr_add(ctx, ea, ea, 8);
2246         get_cpu_vsr(xt, rt1, ctx->le_mode);
2247         tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2248         if (paired) {
2249             gen_addr_add(ctx, ea, ea, 8);
2250             get_cpu_vsr(xt, rt2, !ctx->le_mode);
2251             tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2252             gen_addr_add(ctx, ea, ea, 8);
2253             get_cpu_vsr(xt, rt2, ctx->le_mode);
2254             tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2255         }
2256     } else {
2257         tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2258         set_cpu_vsr(rt1, xt, !ctx->le_mode);
2259         gen_addr_add(ctx, ea, ea, 8);
2260         tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2261         set_cpu_vsr(rt1, xt, ctx->le_mode);
2262         if (paired) {
2263             gen_addr_add(ctx, ea, ea, 8);
2264             tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2265             set_cpu_vsr(rt2, xt, !ctx->le_mode);
2266             gen_addr_add(ctx, ea, ea, 8);
2267             tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2268             set_cpu_vsr(rt2, xt, ctx->le_mode);
2269         }
2270     }
2272     tcg_temp_free(ea);
2273     tcg_temp_free_i64(xt);
2274     return true;
2277 static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired)
2279     if (paired || a->rt >= 32) {
2280         REQUIRE_VSX(ctx);
2281     } else {
2282         REQUIRE_VECTOR(ctx);
2283     }
2285     return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired);
2288 static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a,
2289                            bool store, bool paired)
2291     arg_D d;
2292     REQUIRE_VSX(ctx);
2294     if (!resolve_PLS_D(ctx, &d, a)) {
2295         return true;
2296     }
2298     return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired);
2301 static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired)
2303     if (paired || a->rt >= 32) {
2304         REQUIRE_VSX(ctx);
2305     } else {
2306         REQUIRE_VECTOR(ctx);
2307     }
2309     return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired);
2312 static bool do_lstxsd(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
2314     TCGv ea;
2315     TCGv_i64 xt;
2316     MemOp mop;
2318     if (store) {
2319         REQUIRE_VECTOR(ctx);
2320     } else {
2321         REQUIRE_VSX(ctx);
2322     }
2324     xt = tcg_temp_new_i64();
2325     mop = DEF_MEMOP(MO_UQ);
2327     gen_set_access_type(ctx, ACCESS_INT);
2328     ea = do_ea_calc(ctx, ra, displ);
2330     if (store) {
2331         get_cpu_vsr(xt, rt + 32, true);
2332         tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2333     } else {
2334         tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2335         set_cpu_vsr(rt + 32, xt, true);
2336         set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
2337     }
2339     tcg_temp_free(ea);
2340     tcg_temp_free_i64(xt);
2342     return true;
2345 static bool do_lstxsd_DS(DisasContext *ctx, arg_D *a, bool store)
2347     return do_lstxsd(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
2350 static bool do_plstxsd_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
2352     arg_D d;
2354     if (!resolve_PLS_D(ctx, &d, a)) {
2355         return true;
2356     }
2358     return do_lstxsd(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
2361 static bool do_lstxssp(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
2363     TCGv ea;
2364     TCGv_i64 xt;
2366     REQUIRE_VECTOR(ctx);
2368     xt = tcg_temp_new_i64();
2370     gen_set_access_type(ctx, ACCESS_INT);
2371     ea = do_ea_calc(ctx, ra, displ);
2373     if (store) {
2374         get_cpu_vsr(xt, rt + 32, true);
2375         gen_qemu_st32fs(ctx, xt, ea);
2376     } else {
2377         gen_qemu_ld32fs(ctx, xt, ea);
2378         set_cpu_vsr(rt + 32, xt, true);
2379         set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
2380     }
2382     tcg_temp_free(ea);
2383     tcg_temp_free_i64(xt);
2385     return true;
2388 static bool do_lstxssp_DS(DisasContext *ctx, arg_D *a, bool store)
2390     return do_lstxssp(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
2393 static bool do_plstxssp_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
2395     arg_D d;
2397     if (!resolve_PLS_D(ctx, &d, a)) {
2398         return true;
2399     }
2401     return do_lstxssp(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
2404 TRANS_FLAGS2(ISA300, LXSD, do_lstxsd_DS, false)
2405 TRANS_FLAGS2(ISA300, STXSD, do_lstxsd_DS, true)
2406 TRANS_FLAGS2(ISA300, LXSSP, do_lstxssp_DS, false)
2407 TRANS_FLAGS2(ISA300, STXSSP, do_lstxssp_DS, true)
2408 TRANS_FLAGS2(ISA300, STXV, do_lstxv_D, true, false)
2409 TRANS_FLAGS2(ISA300, LXV, do_lstxv_D, false, false)
2410 TRANS_FLAGS2(ISA310, STXVP, do_lstxv_D, true, true)
2411 TRANS_FLAGS2(ISA310, LXVP, do_lstxv_D, false, true)
2412 TRANS_FLAGS2(ISA300, STXVX, do_lstxv_X, true, false)
2413 TRANS_FLAGS2(ISA300, LXVX, do_lstxv_X, false, false)
2414 TRANS_FLAGS2(ISA310, STXVPX, do_lstxv_X, true, true)
2415 TRANS_FLAGS2(ISA310, LXVPX, do_lstxv_X, false, true)
2416 TRANS64_FLAGS2(ISA310, PLXSD, do_plstxsd_PLS_D, false)
2417 TRANS64_FLAGS2(ISA310, PSTXSD, do_plstxsd_PLS_D, true)
2418 TRANS64_FLAGS2(ISA310, PLXSSP, do_plstxssp_PLS_D, false)
2419 TRANS64_FLAGS2(ISA310, PSTXSSP, do_plstxssp_PLS_D, true)
2420 TRANS64_FLAGS2(ISA310, PSTXV, do_lstxv_PLS_D, true, false)
2421 TRANS64_FLAGS2(ISA310, PLXV, do_lstxv_PLS_D, false, false)
2422 TRANS64_FLAGS2(ISA310, PSTXVP, do_lstxv_PLS_D, true, true)
2423 TRANS64_FLAGS2(ISA310, PLXVP, do_lstxv_PLS_D, false, true)
2425 static bool do_lstrm(DisasContext *ctx, arg_X *a, MemOp mop, bool store)
2427     TCGv ea;
2428     TCGv_i64 xt;
2430     REQUIRE_VSX(ctx);
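    /*
     * "Rightmost" element accesses keep the element in the least-significant
     * doubleword of the VSR; on load the remaining bytes are zeroed.
     */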
2432     xt = tcg_temp_new_i64();
2434     gen_set_access_type(ctx, ACCESS_INT);
2435     ea = do_ea_calc(ctx, a->ra, cpu_gpr[a->rb]);
2437     if (store) {
2438         get_cpu_vsr(xt, a->rt, false);
2439         tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
2440     } else {
2441         tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
2442         set_cpu_vsr(a->rt, xt, false);
2443         set_cpu_vsr(a->rt, tcg_constant_i64(0), true);
2444     }
2446     tcg_temp_free(ea);
2447     tcg_temp_free_i64(xt);
2448     return true;
2451 TRANS_FLAGS2(ISA310, LXVRBX, do_lstrm, DEF_MEMOP(MO_UB), false)
2452 TRANS_FLAGS2(ISA310, LXVRHX, do_lstrm, DEF_MEMOP(MO_UW), false)
2453 TRANS_FLAGS2(ISA310, LXVRWX, do_lstrm, DEF_MEMOP(MO_UL), false)
2454 TRANS_FLAGS2(ISA310, LXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), false)
2455 TRANS_FLAGS2(ISA310, STXVRBX, do_lstrm, DEF_MEMOP(MO_UB), true)
2456 TRANS_FLAGS2(ISA310, STXVRHX, do_lstrm, DEF_MEMOP(MO_UW), true)
2457 TRANS_FLAGS2(ISA310, STXVRWX, do_lstrm, DEF_MEMOP(MO_UL), true)
2458 TRANS_FLAGS2(ISA310, STXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), true)
2460 static void gen_xxeval_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b, TCGv_i64 c,
2461                            int64_t imm)
2463     /*
2464      * Compute only the conjunctions whose corresponding bit of imm is set,
2465      * skipping the rest.
2466      */
2467     int bit;
2468     TCGv_i64 conj, disj;
2470     conj = tcg_temp_new_i64();
2471     disj = tcg_const_i64(0);
2473     /* Iterate over set bits from the least to the most significant bit */
2474     while (imm) {
2475         /*
2476          * Get the next bit to be processed with ctz64. Invert the result of
2477          * ctz64 to match the indexing used by PowerISA.
2478          */
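        /*
         * For example, imm = 0x01 is PowerISA bit 7 (0b111), giving the
         * conjunction a & b & c, while imm = 0x80 is bit 0 (0b000), giving
         * ~a & ~b & ~c.
         */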
2479         bit = 7 - ctz64(imm);
2480         if (bit & 0x4) {
2481             tcg_gen_mov_i64(conj, a);
2482         } else {
2483             tcg_gen_not_i64(conj, a);
2484         }
2485         if (bit & 0x2) {
2486             tcg_gen_and_i64(conj, conj, b);
2487         } else {
2488             tcg_gen_andc_i64(conj, conj, b);
2489         }
2490         if (bit & 0x1) {
2491             tcg_gen_and_i64(conj, conj, c);
2492         } else {
2493             tcg_gen_andc_i64(conj, conj, c);
2494         }
2495         tcg_gen_or_i64(disj, disj, conj);
2497         /* Unset the least significant bit that is set */
2498         imm &= imm - 1;
2499     }
2501     tcg_gen_mov_i64(t, disj);
2503     tcg_temp_free_i64(conj);
2504     tcg_temp_free_i64(disj);
2507 static void gen_xxeval_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
2508                            TCGv_vec c, int64_t imm)
2510     /*
2511      * Compute only the conjunctions whose corresponding bit of imm is set,
2512      * skipping the rest.
2513      */
2514     int bit;
2515     TCGv_vec disj, conj;
2517     disj = tcg_const_zeros_vec_matching(t);
2518     conj = tcg_temp_new_vec_matching(t);
2520     /* Iterate over set bits from the least to the most significant bit */
2521     while (imm) {
2522         /*
2523          * Get the next bit to be processed with ctz64. Invert the result of
2524          * ctz64 to match the indexing used by PowerISA.
2525          */
2526         bit = 7 - ctz64(imm);
2527         if (bit & 0x4) {
2528             tcg_gen_mov_vec(conj, a);
2529         } else {
2530             tcg_gen_not_vec(vece, conj, a);
2531         }
2532         if (bit & 0x2) {
2533             tcg_gen_and_vec(vece, conj, conj, b);
2534         } else {
2535             tcg_gen_andc_vec(vece, conj, conj, b);
2536         }
2537         if (bit & 0x1) {
2538             tcg_gen_and_vec(vece, conj, conj, c);
2539         } else {
2540             tcg_gen_andc_vec(vece, conj, conj, c);
2541         }
2542         tcg_gen_or_vec(vece, disj, disj, conj);
2544         /* Unset the least significant bit that is set */
2545         imm &= imm - 1;
2546     }
2548     tcg_gen_mov_vec(t, disj);
2550     tcg_temp_free_vec(disj);
2551     tcg_temp_free_vec(conj);
2554 static bool trans_XXEVAL(DisasContext *ctx, arg_8RR_XX4_imm *a)
2556     static const TCGOpcode vecop_list[] = {
2557         INDEX_op_andc_vec, 0
2558     };
2559     static const GVecGen4i op = {
2560         .fniv = gen_xxeval_vec,
2561         .fno = gen_helper_XXEVAL,
2562         .fni8 = gen_xxeval_i64,
2563         .opt_opc = vecop_list,
2564         .vece = MO_64
2565     };
2566     int xt = vsr_full_offset(a->xt), xa = vsr_full_offset(a->xa),
2567         xb = vsr_full_offset(a->xb), xc = vsr_full_offset(a->xc);
2569     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2570     REQUIRE_VSX(ctx);
2572     /* Special-case immediates that reduce to a single gvec operation */
2573     switch (a->imm) {
2574     case 0b00000000: /* true */
2575         set_cpu_vsr(a->xt, tcg_constant_i64(0), true);
2576         set_cpu_vsr(a->xt, tcg_constant_i64(0), false);
2577         break;
2578     case 0b00000011: /* and(B,A) */
2579         tcg_gen_gvec_and(MO_64, xt, xb, xa, 16, 16);
2580         break;
2581     case 0b00000101: /* and(C,A) */
2582         tcg_gen_gvec_and(MO_64, xt, xc, xa, 16, 16);
2583         break;
2584     case 0b00001111: /* A */
2585         tcg_gen_gvec_mov(MO_64, xt, xa, 16, 16);
2586         break;
2587     case 0b00010001: /* and(C,B) */
2588         tcg_gen_gvec_and(MO_64, xt, xc, xb, 16, 16);
2589         break;
2590     case 0b00011011: /* C?B:A */
2591         tcg_gen_gvec_bitsel(MO_64, xt, xc, xb, xa, 16, 16);
2592         break;
2593     case 0b00011101: /* B?C:A */
2594         tcg_gen_gvec_bitsel(MO_64, xt, xb, xc, xa, 16, 16);
2595         break;
2596     case 0b00100111: /* C?A:B */
2597         tcg_gen_gvec_bitsel(MO_64, xt, xc, xa, xb, 16, 16);
2598         break;
2599     case 0b00110011: /* B */
2600         tcg_gen_gvec_mov(MO_64, xt, xb, 16, 16);
2601         break;
2602     case 0b00110101: /* A?C:B */
2603         tcg_gen_gvec_bitsel(MO_64, xt, xa, xc, xb, 16, 16);
2604         break;
2605     case 0b00111100: /* xor(B,A) */
2606         tcg_gen_gvec_xor(MO_64, xt, xb, xa, 16, 16);
2607         break;
2608     case 0b00111111: /* or(B,A) */
2609         tcg_gen_gvec_or(MO_64, xt, xb, xa, 16, 16);
2610         break;
2611     case 0b01000111: /* B?A:C */
2612         tcg_gen_gvec_bitsel(MO_64, xt, xb, xa, xc, 16, 16);
2613         break;
2614     case 0b01010011: /* A?B:C */
2615         tcg_gen_gvec_bitsel(MO_64, xt, xa, xb, xc, 16, 16);
2616         break;
2617     case 0b01010101: /* C */
2618         tcg_gen_gvec_mov(MO_64, xt, xc, 16, 16);
2619         break;
2620     case 0b01011010: /* xor(C,A) */
2621         tcg_gen_gvec_xor(MO_64, xt, xc, xa, 16, 16);
2622         break;
2623     case 0b01011111: /* or(C,A) */
2624         tcg_gen_gvec_or(MO_64, xt, xc, xa, 16, 16);
2625         break;
2626     case 0b01100110: /* xor(C,B) */
2627         tcg_gen_gvec_xor(MO_64, xt, xc, xb, 16, 16);
2628         break;
2629     case 0b01110111: /* or(C,B) */
2630         tcg_gen_gvec_or(MO_64, xt, xc, xb, 16, 16);
2631         break;
2632     case 0b10001000: /* nor(C,B) */
2633         tcg_gen_gvec_nor(MO_64, xt, xc, xb, 16, 16);
2634         break;
2635     case 0b10011001: /* eqv(C,B) */
2636         tcg_gen_gvec_eqv(MO_64, xt, xc, xb, 16, 16);
2637         break;
2638     case 0b10100000: /* nor(C,A) */
2639         tcg_gen_gvec_nor(MO_64, xt, xc, xa, 16, 16);
2640         break;
2641     case 0b10100101: /* eqv(C,A) */
2642         tcg_gen_gvec_eqv(MO_64, xt, xc, xa, 16, 16);
2643         break;
2644     case 0b10101010: /* not(C) */
2645         tcg_gen_gvec_not(MO_64, xt, xc, 16, 16);
2646         break;
2647     case 0b11000000: /* nor(B,A) */
2648         tcg_gen_gvec_nor(MO_64, xt, xb, xa, 16, 16);
2649         break;
2650     case 0b11000011: /* eqv(B,A) */
2651         tcg_gen_gvec_eqv(MO_64, xt, xb, xa, 16, 16);
2652         break;
2653     case 0b11001100: /* not(B) */
2654         tcg_gen_gvec_not(MO_64, xt, xb, 16, 16);
2655         break;
2656     case 0b11101110: /* nand(C,B) */
2657         tcg_gen_gvec_nand(MO_64, xt, xc, xb, 16, 16);
2658         break;
2659     case 0b11110000: /* not(A) */
2660         tcg_gen_gvec_not(MO_64, xt, xa, 16, 16);
2661         break;
2662     case 0b11111010: /* nand(C,A) */
2663         tcg_gen_gvec_nand(MO_64, xt, xc, xa, 16, 16);
2664         break;
2665     case 0b11111100: /* nand(B,A) */
2666         tcg_gen_gvec_nand(MO_64, xt, xb, xa, 16, 16);
2667         break;
2668     case 0b11111111: /* true */
2669         set_cpu_vsr(a->xt, tcg_constant_i64(-1), true);
2670         set_cpu_vsr(a->xt, tcg_constant_i64(-1), false);
2671         break;
2672     default:
2673         /* Fall back to computing all conjunctions/disjunctions */
2674         tcg_gen_gvec_4i(xt, xa, xb, xc, 16, 16, a->imm, &op);
2675     }
2677     return true;
2680 static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
2681                              TCGv_vec c)
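    /*
     * Replicate each element's sign bit into a full element mask, then
     * select b where the mask is set and a elsewhere.
     */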
2683     TCGv_vec tmp = tcg_temp_new_vec_matching(c);
2684     tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1);
2685     tcg_gen_bitsel_vec(vece, t, tmp, b, a);
2686     tcg_temp_free_vec(tmp);
2689 static bool do_xxblendv(DisasContext *ctx, arg_8RR_XX4 *a, unsigned vece)
2691     static const TCGOpcode vecop_list[] = {
2692         INDEX_op_sari_vec, 0
2693     };
2694     static const GVecGen4 ops[4] = {
2695         {
2696             .fniv = gen_xxblendv_vec,
2697             .fno = gen_helper_XXBLENDVB,
2698             .opt_opc = vecop_list,
2699             .vece = MO_8
2700         },
2701         {
2702             .fniv = gen_xxblendv_vec,
2703             .fno = gen_helper_XXBLENDVH,
2704             .opt_opc = vecop_list,
2705             .vece = MO_16
2706         },
2707         {
2708             .fniv = gen_xxblendv_vec,
2709             .fno = gen_helper_XXBLENDVW,
2710             .opt_opc = vecop_list,
2711             .vece = MO_32
2712         },
2713         {
2714             .fniv = gen_xxblendv_vec,
2715             .fno = gen_helper_XXBLENDVD,
2716             .opt_opc = vecop_list,
2717             .vece = MO_64
2718         }
2719     };
2721     REQUIRE_VSX(ctx);
2723     tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
2724                    vsr_full_offset(a->xb), vsr_full_offset(a->xc),
2725                    16, 16, &ops[vece]);
2727     return true;
2730 TRANS(XXBLENDVB, do_xxblendv, MO_8)
2731 TRANS(XXBLENDVH, do_xxblendv, MO_16)
2732 TRANS(XXBLENDVW, do_xxblendv, MO_32)
2733 TRANS(XXBLENDVD, do_xxblendv, MO_64)
2735 static bool do_helper_XX3(DisasContext *ctx, arg_XX3 *a,
2736     void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
2738     TCGv_ptr xt, xa, xb;
2740     REQUIRE_INSNS_FLAGS2(ctx, ISA300);
2741     REQUIRE_VSX(ctx);
2743     xt = gen_vsr_ptr(a->xt);
2744     xa = gen_vsr_ptr(a->xa);
2745     xb = gen_vsr_ptr(a->xb);
2747     helper(cpu_env, xt, xa, xb);
2749     tcg_temp_free_ptr(xt);
2750     tcg_temp_free_ptr(xa);
2751     tcg_temp_free_ptr(xb);
2753     return true;
2756 TRANS(XSCMPEQDP, do_helper_XX3, gen_helper_XSCMPEQDP)
2757 TRANS(XSCMPGEDP, do_helper_XX3, gen_helper_XSCMPGEDP)
2758 TRANS(XSCMPGTDP, do_helper_XX3, gen_helper_XSCMPGTDP)
2759 TRANS(XSMAXCDP, do_helper_XX3, gen_helper_XSMAXCDP)
2760 TRANS(XSMINCDP, do_helper_XX3, gen_helper_XSMINCDP)
2761 TRANS(XSMAXJDP, do_helper_XX3, gen_helper_XSMAXJDP)
2762 TRANS(XSMINJDP, do_helper_XX3, gen_helper_XSMINJDP)
2764 static bool do_helper_X(arg_X *a,
2765     void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
2767     TCGv_ptr rt, ra, rb;
2769     rt = gen_avr_ptr(a->rt);
2770     ra = gen_avr_ptr(a->ra);
2771     rb = gen_avr_ptr(a->rb);
2773     helper(cpu_env, rt, ra, rb);
2775     tcg_temp_free_ptr(rt);
2776     tcg_temp_free_ptr(ra);
2777     tcg_temp_free_ptr(rb);
2779     return true;
2782 static bool do_xscmpqp(DisasContext *ctx, arg_X *a,
2783     void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
2785     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2786     REQUIRE_VSX(ctx);
2788     return do_helper_X(a, helper);
2791 TRANS(XSCMPEQQP, do_xscmpqp, gen_helper_XSCMPEQQP)
2792 TRANS(XSCMPGEQP, do_xscmpqp, gen_helper_XSCMPGEQP)
2793 TRANS(XSCMPGTQP, do_xscmpqp, gen_helper_XSCMPGTQP)
2794 TRANS(XSMAXCQP, do_xscmpqp, gen_helper_XSMAXCQP)
2795 TRANS(XSMINCQP, do_xscmpqp, gen_helper_XSMINCQP)
2797 static bool trans_XVCVSPBF16(DisasContext *ctx, arg_XX2 *a)
2799     TCGv_ptr xt, xb;
2801     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2802     REQUIRE_VSX(ctx);
2804     xt = gen_vsr_ptr(a->xt);
2805     xb = gen_vsr_ptr(a->xb);
2807     gen_helper_XVCVSPBF16(cpu_env, xt, xb);
2809     tcg_temp_free_ptr(xt);
2810     tcg_temp_free_ptr(xb);
2812     return true;
2815 static bool trans_XVCVBF16SPN(DisasContext *ctx, arg_XX2 *a)
2817     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2818     REQUIRE_VSX(ctx);
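    /*
     * A bfloat16 value is the high half of the corresponding
     * single-precision encoding, so the conversion is a 16-bit left shift
     * of each word.
     */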
2820     tcg_gen_gvec_shli(MO_32, vsr_full_offset(a->xt), vsr_full_offset(a->xb),
2821                       16, 16, 16);
2823     return true;
2826 /*
2827  * PowerISA 3.1 states that, for the current version of the architecture,
2828  * "the hardware implementation provides the effect of ACC[i] and VSRs
2829  * 4*i to 4*i + 3 logically containing the same data" and "The
2830  * Accumulators introduce no new logical state at this time" (page 501).
2831  * For now it seems unnecessary to create new structures, so ACC[i] is
2832  * simply VSRs 4*i to 4*i + 3, and moves to and from the accumulators
2833  * are no-ops.
2834  */
2835 static bool trans_XXMFACC(DisasContext *ctx, arg_X_a *a)
2837     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2838     REQUIRE_VSX(ctx);
2839     return true;
2842 static bool trans_XXMTACC(DisasContext *ctx, arg_X_a *a)
2844     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2845     REQUIRE_VSX(ctx);
2846     return true;
2849 static bool trans_XXSETACCZ(DisasContext *ctx, arg_X_a *a)
2851     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2852     REQUIRE_VSX(ctx);
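    /* Clear the full 64-byte accumulator, i.e. the four overlapping VSRs. */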
2853     tcg_gen_gvec_dup_imm(MO_64, acc_full_offset(a->ra), 64, 64, 0);
2854     return true;
2857 static bool do_ger(DisasContext *ctx, arg_MMIRR_XX3 *a,
2858     void (*helper)(TCGv_env, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_i32))
2860     uint32_t mask;
2861     TCGv_ptr xt, xa, xb;
2862     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
2863     REQUIRE_VSX(ctx);
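    /*
     * The accumulator overlaps VSRs 4 * xt .. 4 * xt + 3, so reject forms
     * where a source VSR falls inside the target accumulator.
     */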
2864     if (unlikely((a->xa / 4 == a->xt) || (a->xb / 4 == a->xt))) {
2865         gen_invalid(ctx);
2866         return true;
2867     }
2869     xt = gen_acc_ptr(a->xt);
2870     xa = gen_vsr_ptr(a->xa);
2871     xb = gen_vsr_ptr(a->xb);
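    /*
     * The pmsk, ymsk and xmsk fields are packed into a single i32 so the
     * helper receives them as one constant argument.
     */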
2873     mask = ger_pack_masks(a->pmsk, a->ymsk, a->xmsk);
2874     helper(cpu_env, xa, xb, xt, tcg_constant_i32(mask));
2875     tcg_temp_free_ptr(xt);
2876     tcg_temp_free_ptr(xa);
2877     tcg_temp_free_ptr(xb);
2878     return true;
2881 TRANS(XVI4GER8, do_ger, gen_helper_XVI4GER8)
2882 TRANS(XVI4GER8PP, do_ger, gen_helper_XVI4GER8PP)
2883 TRANS(XVI8GER4, do_ger, gen_helper_XVI8GER4)
2884 TRANS(XVI8GER4PP, do_ger, gen_helper_XVI8GER4PP)
2885 TRANS(XVI8GER4SPP, do_ger, gen_helper_XVI8GER4SPP)
2886 TRANS(XVI16GER2, do_ger, gen_helper_XVI16GER2)
2887 TRANS(XVI16GER2PP, do_ger, gen_helper_XVI16GER2PP)
2888 TRANS(XVI16GER2S, do_ger, gen_helper_XVI16GER2S)
2889 TRANS(XVI16GER2SPP, do_ger, gen_helper_XVI16GER2SPP)
2891 TRANS64(PMXVI4GER8, do_ger, gen_helper_XVI4GER8)
2892 TRANS64(PMXVI4GER8PP, do_ger, gen_helper_XVI4GER8PP)
2893 TRANS64(PMXVI8GER4, do_ger, gen_helper_XVI8GER4)
2894 TRANS64(PMXVI8GER4PP, do_ger, gen_helper_XVI8GER4PP)
2895 TRANS64(PMXVI8GER4SPP, do_ger, gen_helper_XVI8GER4SPP)
2896 TRANS64(PMXVI16GER2, do_ger, gen_helper_XVI16GER2)
2897 TRANS64(PMXVI16GER2PP, do_ger, gen_helper_XVI16GER2PP)
2898 TRANS64(PMXVI16GER2S, do_ger, gen_helper_XVI16GER2S)
2899 TRANS64(PMXVI16GER2SPP, do_ger, gen_helper_XVI16GER2SPP)
2901 TRANS(XVF16GER2, do_ger, gen_helper_XVF16GER2)
2902 TRANS(XVF16GER2PP, do_ger, gen_helper_XVF16GER2PP)
2903 TRANS(XVF16GER2PN, do_ger, gen_helper_XVF16GER2PN)
2904 TRANS(XVF16GER2NP, do_ger, gen_helper_XVF16GER2NP)
2905 TRANS(XVF16GER2NN, do_ger, gen_helper_XVF16GER2NN)
2907 TRANS(XVF32GER, do_ger, gen_helper_XVF32GER)
2908 TRANS(XVF32GERPP, do_ger, gen_helper_XVF32GERPP)
2909 TRANS(XVF32GERPN, do_ger, gen_helper_XVF32GERPN)
2910 TRANS(XVF32GERNP, do_ger, gen_helper_XVF32GERNP)
2911 TRANS(XVF32GERNN, do_ger, gen_helper_XVF32GERNN)
2913 TRANS(XVF64GER, do_ger, gen_helper_XVF64GER)
2914 TRANS(XVF64GERPP, do_ger, gen_helper_XVF64GERPP)
2915 TRANS(XVF64GERPN, do_ger, gen_helper_XVF64GERPN)
2916 TRANS(XVF64GERNP, do_ger, gen_helper_XVF64GERNP)
2917 TRANS(XVF64GERNN, do_ger, gen_helper_XVF64GERNN)
2919 #undef GEN_XX2FORM
2920 #undef GEN_XX3FORM
2921 #undef GEN_XX2IFORM
2922 #undef GEN_XX3_RC_FORM
2923 #undef GEN_XX3FORM_DM
2924 #undef VSX_LOGICAL