/*** VSX extension ***/

static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, true));
}

static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, false));
}

static inline void set_cpu_vsrh(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, true));
}

static inline void set_cpu_vsrl(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, false));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}
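/*
 * For illustration: "h" addresses doubleword 0 of the 128-bit VSR and
 * "l" doubleword 1, so a sketch of a full VSR-to-VSR copy built from
 * the accessors above would be:
 *
 *     TCGv_i64 t = tcg_temp_new_i64();
 *     get_cpu_vsrh(t, xB(ctx->opcode));
 *     set_cpu_vsrh(xT(ctx->opcode), t);
 *     get_cpu_vsrl(t, xB(ctx->opcode));
 *     set_cpu_vsrl(xT(ctx->opcode), t);
 *     tcg_temp_free_i64(t);
 */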
#define VSX_LOAD_SCALAR(name, operation) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##operation(ctx, t0, EA); \
    set_cpu_vsrh(xT(ctx->opcode), t0); \
    /* NOTE: cpu_vsrl is undefined */ \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)
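/*
 * Illustrative note: the "operation" argument picks the memory access
 * that fills doubleword 0 of the target, e.g. gen_lxsiwax ends up
 * calling gen_qemu_ld32s_i64 (sign-extending word load) and gen_lxsibzx
 * calls gen_qemu_ld8u_i64 (zero-extending byte load); doubleword 1 is
 * left undefined in every case, as the NOTE above says.
 */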
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_mov_i64(t1, t0);
    set_cpu_vsrl(xT(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}
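/*
 * Worked example: the mask/shift pairs above swap the two bytes of each
 * 16-bit lane, so inh = 0x0001020304050607 produces
 * outh = 0x0100030205040706.
 */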
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}
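/*
 * Worked example: the full 64-bit bswap followed by the word swap
 * (shri + deposit) byte-reverses each 32-bit lane in place, so
 * inh = 0x0001020304050607 produces outh = 0x0302010007060504.
 */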
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#define VSX_VECTOR_LOAD(name, op, indexed) \
static void gen_##name(DisasContext *ctx) \
{ \
    int xt; \
    TCGv EA; \
    TCGv_i64 xth; \
    TCGv_i64 xtl; \
\
    if (indexed) { \
        xt = xT(ctx->opcode); \
    } else { \
        xt = DQxT(ctx->opcode); \
    } \
\
    if (xt < 32) { \
        if (unlikely(!ctx->vsx_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VSXU); \
            return; \
        } \
    } else { \
        if (unlikely(!ctx->altivec_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VPU); \
            return; \
        } \
    } \
    xth = tcg_temp_new_i64(); \
    xtl = tcg_temp_new_i64(); \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    if (indexed) { \
        gen_addr_reg_index(ctx, EA); \
    } else { \
        gen_addr_imm_index(ctx, EA, 0x0F); \
    } \
    if (ctx->le_mode) { \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ); \
        set_cpu_vsrl(xt, xtl); \
        tcg_gen_addi_tl(EA, EA, 8); \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ); \
        set_cpu_vsrh(xt, xth); \
    } else { \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ); \
        set_cpu_vsrh(xt, xth); \
        tcg_gen_addi_tl(EA, EA, 8); \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ); \
        set_cpu_vsrl(xt, xtl); \
    } \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(xth); \
    tcg_temp_free_i64(xtl); \
}

VSX_VECTOR_LOAD(lxv, ld_i64, 0)
VSX_VECTOR_LOAD(lxvx, ld_i64, 1)
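/*
 * Illustrative note: indexed = 0 builds the DQ-form lxv (immediate
 * displacement with its low four bits cleared by the 0x0F mask), while
 * indexed = 1 builds the X-form lxvx.  Targets xt >= 32 live in the
 * AltiVec-overlapping half of the register file, hence the VPU rather
 * than VSXU availability check.
 */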
#define VSX_VECTOR_STORE(name, op, indexed) \
static void gen_##name(DisasContext *ctx) \
{ \
    int xt; \
    TCGv EA; \
    TCGv_i64 xth; \
    TCGv_i64 xtl; \
\
    if (indexed) { \
        xt = xT(ctx->opcode); \
    } else { \
        xt = DQxT(ctx->opcode); \
    } \
\
    if (xt < 32) { \
        if (unlikely(!ctx->vsx_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VSXU); \
            return; \
        } \
    } else { \
        if (unlikely(!ctx->altivec_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VPU); \
            return; \
        } \
    } \
    xth = tcg_temp_new_i64(); \
    xtl = tcg_temp_new_i64(); \
    get_cpu_vsrh(xth, xt); \
    get_cpu_vsrl(xtl, xt); \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    if (indexed) { \
        gen_addr_reg_index(ctx, EA); \
    } else { \
        gen_addr_imm_index(ctx, EA, 0x0F); \
    } \
    if (ctx->le_mode) { \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ); \
        tcg_gen_addi_tl(EA, EA, 8); \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ); \
    } else { \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ); \
        tcg_gen_addi_tl(EA, EA, 8); \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ); \
    } \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(xth); \
    tcg_temp_free_i64(xtl); \
}

VSX_VECTOR_STORE(stxv, st_i64, 0)
VSX_VECTOR_STORE(stxvx, st_i64, 1)

#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA, xt; \
\
    if (xT(ctx->opcode) < 32) { \
        if (unlikely(!ctx->vsx_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VSXU); \
            return; \
        } \
    } else { \
        if (unlikely(!ctx->altivec_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VPU); \
            return; \
        } \
    } \
    EA = tcg_temp_new(); \
    xt = tcg_const_tl(xT(ctx->opcode)); \
    gen_set_access_type(ctx, ACCESS_INT); \
    gen_addr_register(ctx, EA); \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]); \
    tcg_temp_free(EA); \
    tcg_temp_free(xt); \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

#define VSX_LOAD_SCALAR_DS(name, operation) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 xth; \
\
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    xth = tcg_temp_new_i64(); \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0x03); \
    gen_qemu_##operation(ctx, xth, EA); \
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth); \
    /* NOTE: cpu_vsrl is undefined */ \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(xth); \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

#define VSX_STORE_SCALAR(name, operation) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    get_cpu_vsrh(t0, xS(ctx->opcode)); \
    gen_qemu_##operation(ctx, t0, EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)
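/*
 * Illustrative note: these mirror VSX_LOAD_SCALAR; e.g. gen_stxsibx
 * reads doubleword 0 of VSR[xS] and gen_qemu_st8_i64 stores only its
 * least significant byte.
 */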
static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsrh(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsrl(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

#define VSX_STORE_SCALAR_DS(name, operation) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 xth; \
\
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    xth = tcg_temp_new_i64(); \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32); \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0x03); \
    gen_qemu_##operation(ctx, xth, EA); \
    /* NOTE: cpu_vsrl is undefined */ \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(xth); \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrl(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsrh(xT(ctx->opcode), t0);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}
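/*
 * Worked example: the deposit copies the low word of rA over its high
 * word, so rA = 0x1111111122222222 splats 0x22222222 into all four
 * words of VSR[xT].
 */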
#endif
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
        }

        set_cpu_vsrh(xT(ctx->opcode), xh);
        set_cpu_vsrl(xT(ctx->opcode), xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}
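/*
 * Worked example: DM bit 1 selects xA's doubleword for xT.dw0 and DM
 * bit 0 selects xB's doubleword for xT.dw1, so DM = 0b00 gives
 * xT = { xA.dw0, xB.dw0 } and DM = 0b11 gives xT = { xA.dw1, xB.dw1 }.
 * The first branch exists only so that an xT aliasing xA or xB reads
 * both sources before either half is written.
 */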
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

#define VSX_SCALAR_MOVE(name, op, sgn_mask) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_i64 xb, sgm; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xb = tcg_temp_new_i64(); \
    sgm = tcg_temp_new_i64(); \
    get_cpu_vsrh(xb, xB(ctx->opcode)); \
    tcg_gen_movi_i64(sgm, sgn_mask); \
    switch (op) { \
    case OP_ABS: { \
        tcg_gen_andc_i64(xb, xb, sgm); \
        break; \
    } \
    case OP_NABS: { \
        tcg_gen_or_i64(xb, xb, sgm); \
        break; \
    } \
    case OP_NEG: { \
        tcg_gen_xor_i64(xb, xb, sgm); \
        break; \
    } \
    case OP_CPSGN: { \
        TCGv_i64 xa = tcg_temp_new_i64(); \
        get_cpu_vsrh(xa, xA(ctx->opcode)); \
        tcg_gen_and_i64(xa, xa, sgm); \
        tcg_gen_andc_i64(xb, xb, sgm); \
        tcg_gen_or_i64(xb, xb, xa); \
        tcg_temp_free_i64(xa); \
        break; \
    } \
    } \
    set_cpu_vsrh(xT(ctx->opcode), xb); \
    tcg_temp_free_i64(xb); \
    tcg_temp_free_i64(sgm); \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
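/*
 * Worked example of the sign-mask arithmetic: with SGN_MASK_DP,
 * gen_xsnegdp XORs the sign bit, turning 1.0 (0x3FF0000000000000) into
 * -1.0 (0xBFF0000000000000), while gen_xsabsdp ANDs it away, turning
 * -1.0 back into 1.0.
 */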
#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    int xa; \
    int xt = rD(ctx->opcode) + 32; \
    int xb = rB(ctx->opcode) + 32; \
    TCGv_i64 xah, xbh, xbl, sgm, tmp; \
\
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xbh = tcg_temp_new_i64(); \
    xbl = tcg_temp_new_i64(); \
    sgm = tcg_temp_new_i64(); \
    tmp = tcg_temp_new_i64(); \
    get_cpu_vsrh(xbh, xb); \
    get_cpu_vsrl(xbl, xb); \
    tcg_gen_movi_i64(sgm, sgn_mask); \
    switch (op) { \
    case OP_ABS: \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        break; \
    case OP_NABS: \
        tcg_gen_or_i64(xbh, xbh, sgm); \
        break; \
    case OP_NEG: \
        tcg_gen_xor_i64(xbh, xbh, sgm); \
        break; \
    case OP_CPSGN: \
        xah = tcg_temp_new_i64(); \
        xa = rA(ctx->opcode) + 32; \
        get_cpu_vsrh(tmp, xa); \
        tcg_gen_and_i64(xah, tmp, sgm); \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        tcg_gen_or_i64(xbh, xbh, xah); \
        tcg_temp_free_i64(xah); \
        break; \
    } \
    set_cpu_vsrh(xt, xbh); \
    set_cpu_vsrl(xt, xbl); \
    tcg_temp_free_i64(xbl); \
    tcg_temp_free_i64(xbh); \
    tcg_temp_free_i64(sgm); \
    tcg_temp_free_i64(tmp); \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_i64 xbh, xbl, sgm; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xbh = tcg_temp_new_i64(); \
    xbl = tcg_temp_new_i64(); \
    sgm = tcg_temp_new_i64(); \
    get_cpu_vsrh(xbh, xB(ctx->opcode)); \
    get_cpu_vsrl(xbl, xB(ctx->opcode)); \
    tcg_gen_movi_i64(sgm, sgn_mask); \
    switch (op) { \
    case OP_ABS: { \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        tcg_gen_andc_i64(xbl, xbl, sgm); \
        break; \
    } \
    case OP_NABS: { \
        tcg_gen_or_i64(xbh, xbh, sgm); \
        tcg_gen_or_i64(xbl, xbl, sgm); \
        break; \
    } \
    case OP_NEG: { \
        tcg_gen_xor_i64(xbh, xbh, sgm); \
        tcg_gen_xor_i64(xbl, xbl, sgm); \
        break; \
    } \
    case OP_CPSGN: { \
        TCGv_i64 xah = tcg_temp_new_i64(); \
        TCGv_i64 xal = tcg_temp_new_i64(); \
        get_cpu_vsrh(xah, xA(ctx->opcode)); \
        get_cpu_vsrl(xal, xA(ctx->opcode)); \
        tcg_gen_and_i64(xah, xah, sgm); \
        tcg_gen_and_i64(xal, xal, sgm); \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        tcg_gen_andc_i64(xbl, xbl, sgm); \
        tcg_gen_or_i64(xbh, xbh, xah); \
        tcg_gen_or_i64(xbl, xbl, xal); \
        tcg_temp_free_i64(xah); \
        tcg_temp_free_i64(xal); \
        break; \
    } \
    } \
    set_cpu_vsrh(xT(ctx->opcode), xbh); \
    set_cpu_vsrl(xT(ctx->opcode), xbl); \
    tcg_temp_free_i64(xbh); \
    tcg_temp_free_i64(xbl); \
    tcg_temp_free_i64(sgm); \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

#define VSX_CMP(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 ignored; \
    TCGv_ptr xt, xa, xb; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xt = gen_vsr_ptr(xT(ctx->opcode)); \
    xa = gen_vsr_ptr(xA(ctx->opcode)); \
    xb = gen_vsr_ptr(xB(ctx->opcode)); \
    if ((ctx->opcode >> (31 - 21)) & 1) { \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb); \
    } else { \
        ignored = tcg_temp_new_i32(); \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb); \
        tcg_temp_free_i32(ignored); \
    } \
    gen_helper_float_check_status(cpu_env); \
    tcg_temp_free_ptr(xt); \
    tcg_temp_free_ptr(xa); \
    tcg_temp_free_ptr(xb); \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_ISA300)
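/*
 * Illustrative note: "(ctx->opcode >> (31 - 21)) & 1" tests the record
 * bit (Rc) of the XX3 compare form; when set, the helper's comparison
 * summary goes to CR field 6, otherwise the result is computed into a
 * scratch and discarded.
 */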
static void gen_xscvqpdp(DisasContext *ctx)
{
    TCGv_i32 opc;
    TCGv_ptr xt, xb;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    opc = tcg_const_i32(ctx->opcode);
    xt = gen_vsr_ptr(xT(ctx->opcode));
    xb = gen_vsr_ptr(xB(ctx->opcode));
    gen_helper_xscvqpdp(cpu_env, opc, xt, xb);
    tcg_temp_free_i32(opc);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);
}
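/*
 * Note on the dedicated generator above: unlike GEN_VSX_HELPER_2 below,
 * it hands the helper pointers to the target and source VSRs as well as
 * the raw opcode, presumably so the helper can still inspect opcode
 * bits (such as the round-to-odd variant, xscvqpdpo) without
 * re-decoding the register fields itself.
 */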
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 opc; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    opc = tcg_const_i32(ctx->opcode); \
    gen_helper_##name(cpu_env, opc); \
    tcg_temp_free_i32(opc); \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_ptr xt, xa, xb; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xt = gen_vsr_ptr(xT(ctx->opcode)); \
    xa = gen_vsr_ptr(xA(ctx->opcode)); \
    xb = gen_vsr_ptr(xB(ctx->opcode)); \
    gen_helper_##name(cpu_env, xt, xa, xb); \
    tcg_temp_free_ptr(xt); \
    tcg_temp_free_ptr(xa); \
    tcg_temp_free_ptr(xb); \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    get_cpu_vsrh(t0, xB(ctx->opcode)); \
    gen_helper_##name(t1, cpu_env, t0); \
    set_cpu_vsrh(xT(ctx->opcode), t1); \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}
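/*
 * For reference, a sketch of the expansion of the X3 pattern for
 * xsadddp (purely mechanical, following the macro body above):
 *
 *     static void gen_xsadddp(DisasContext *ctx)
 *     {
 *         TCGv_ptr xt, xa, xb;
 *         if (unlikely(!ctx->vsx_enabled)) {
 *             gen_exception(ctx, POWERPC_EXCP_VSXU);
 *             return;
 *         }
 *         xt = gen_vsr_ptr(xT(ctx->opcode));
 *         xa = gen_vsr_ptr(xA(ctx->opcode));
 *         xb = gen_vsr_ptr(xB(ctx->opcode));
 *         gen_helper_xsadddp(cpu_env, xt, xa, xb);
 *         tcg_temp_free_ptr(xt);
 *         tcg_temp_free_ptr(xa);
 *         tcg_temp_free_ptr(xb);
 *     }
 */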
GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsminjdp, 0x00, 0x13, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#define VSX_LOGICAL(name, vece, tcg_op) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    tcg_op(vece, vsr_full_offset(xT(ctx->opcode)), \
           vsr_full_offset(xA(ctx->opcode)), \
           vsr_full_offset(xB(ctx->opcode)), 16, 16); \
}

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
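/*
 * Illustrative note: these expand directly to gvec operations over the
 * full 16 bytes of the VSR, e.g. gen_xxlxor emits
 * tcg_gen_gvec_xor(MO_64, ...), so the common "xxlxor x,x,x" zeroing
 * idiom becomes a single host-vector XOR of a register with itself.
 */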
#define VSX_XXMRG(name, high) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_i64 a0, a1, b0, b1, tmp; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    a0 = tcg_temp_new_i64(); \
    a1 = tcg_temp_new_i64(); \
    b0 = tcg_temp_new_i64(); \
    b1 = tcg_temp_new_i64(); \
    tmp = tcg_temp_new_i64(); \
    if (high) { \
        get_cpu_vsrh(a0, xA(ctx->opcode)); \
        get_cpu_vsrh(a1, xA(ctx->opcode)); \
        get_cpu_vsrh(b0, xB(ctx->opcode)); \
        get_cpu_vsrh(b1, xB(ctx->opcode)); \
    } else { \
        get_cpu_vsrl(a0, xA(ctx->opcode)); \
        get_cpu_vsrl(a1, xA(ctx->opcode)); \
        get_cpu_vsrl(b0, xB(ctx->opcode)); \
        get_cpu_vsrl(b1, xB(ctx->opcode)); \
    } \
    tcg_gen_shri_i64(a0, a0, 32); \
    tcg_gen_shri_i64(b0, b0, 32); \
    tcg_gen_deposit_i64(tmp, b0, a0, 32, 32); \
    set_cpu_vsrh(xT(ctx->opcode), tmp); \
    tcg_gen_deposit_i64(tmp, b1, a1, 32, 32); \
    set_cpu_vsrl(xT(ctx->opcode), tmp); \
    tcg_temp_free_i64(a0); \
    tcg_temp_free_i64(a1); \
    tcg_temp_free_i64(b0); \
    tcg_temp_free_i64(b1); \
    tcg_temp_free_i64(tmp); \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)
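/*
 * Worked example: for xxmrghw all four temporaries hold doubleword 0 of
 * xA and xB; the shifts and deposits interleave the 32-bit words so
 * that xT = { A.w0, B.w0, A.w1, B.w1 }.  xxmrglw does the same with
 * doubleword 1, giving { A.w2, B.w2, A.w3, B.w3 }.
 */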
static void gen_xxsel(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int ra = xA(ctx->opcode);
    int rb = xB(ctx->opcode);
    int rc = xC(ctx->opcode);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(rt), vsr_full_offset(rc),
                        vsr_full_offset(rb), vsr_full_offset(ra), 16, 16);
}

static void gen_xxspltw(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int rb = xB(ctx->opcode);
    int uim = UIM(ctx->opcode);
    int tofs, bofs;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    tofs = vsr_full_offset(rt);
    bofs = vsr_full_offset(rb);
    bofs += uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
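/*
 * Worked example: (~(uint64_t)0 / 0xff) is 0x0101010101010101, so
 * pattern(0xAB) yields 0xABABABABABABABAB.
 */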
static void gen_xxspltib(DisasContext *ctx)
{
    uint8_t uim8 = IMM8(ctx->opcode);
    int rt = xT(ctx->opcode);

    if (rt < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_gvec_dup8i(vsr_full_offset(rt), 16, 16, uim8);
}

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        get_cpu_vsrh(xth, xA(ctx->opcode));
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrh(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrl(t0, xA(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        get_cpu_vsrl(xth, xA(ctx->opcode));
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrl(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrl(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
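/*
 * Worked example: xxsldwi selects a 16-byte window starting SHW words
 * into the concatenation xA || xB.  With xA = {a0,a1,a2,a3} and
 * xB = {b0,b1,b2,b3}, SHW = 1 yields xT = {a1,a2,a3,b0}, which is what
 * the shift/or pairs in case 1 assemble.
 */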
#define VSX_EXTRACT_INSERT(name) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv xt, xb; \
    TCGv_i32 t0; \
    TCGv_i64 t1; \
    uint8_t uimm = UIMM4(ctx->opcode); \
\
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xt = tcg_const_tl(xT(ctx->opcode)); \
    xb = tcg_const_tl(xB(ctx->opcode)); \
    t0 = tcg_temp_new_i32(); \
    t1 = tcg_temp_new_i64(); \
    /* \
     * uimm > 15 out of bound and for \
     * uimm > 12 handle as per hardware in helper \
     */ \
    if (uimm > 15) { \
        tcg_gen_movi_i64(t1, 0); \
        set_cpu_vsrh(xT(ctx->opcode), t1); \
        set_cpu_vsrl(xT(ctx->opcode), t1); \
        /* don't leak the temporaries on this early exit */ \
        tcg_temp_free(xb); \
        tcg_temp_free(xt); \
        tcg_temp_free_i32(t0); \
        tcg_temp_free_i64(t1); \
        return; \
    } \
    tcg_gen_movi_i32(t0, uimm); \
    gen_helper_##name(cpu_env, xt, xb, t0); \
    tcg_temp_free(xb); \
    tcg_temp_free(xt); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i64(t1); \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)
#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xB(ctx->opcode));
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}
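/*
 * Worked example: extract(rt, t0, 52, 11) pulls out the IEEE
 * double-precision exponent field, so for 1.0 (0x3FF0000000000000) rt
 * becomes 0x3FF.
 */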
static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
    get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif

static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL