2 ;; Copyright (C) 2002-2024 Free Software Foundation, Inc.
3 ;; Contributed by Aldy Hernandez (aldy@quesejoda.com)
5 ;; This file is part of GCC.
7 ;; GCC is free software; you can redistribute it and/or modify it
8 ;; under the terms of the GNU General Public License as published
9 ;; by the Free Software Foundation; either version 3, or (at your
10 ;; option) any later version.
12 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
13 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 ;; License for more details.
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
21 (define_c_enum "unspec"
51 UNSPEC_VPACK_SIGN_SIGN_SAT
52 UNSPEC_VPACK_SIGN_UNS_SAT
53 UNSPEC_VPACK_UNS_UNS_SAT
54 UNSPEC_VPACK_UNS_UNS_MOD
55 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT
77 UNSPEC_VUNPACK_HI_SIGN
78 UNSPEC_VUNPACK_LO_SIGN
79 UNSPEC_VUNPACK_HI_SIGN_DIRECT
80 UNSPEC_VUNPACK_LO_SIGN_DIRECT
83 UNSPEC_CONVERT_4F32_8I16
84 UNSPEC_CONVERT_4F32_8F16
146 UNSPEC_VSUMSWS_DIRECT
176 (define_c_enum "unspecv"
184 ;; Short vec int modes
185 (define_mode_iterator VIshort [V8HI V16QI])
187 (define_mode_iterator VF [V4SF])
188 ;; Vec modes, pity mode iterators are not composable
189 (define_mode_iterator V [V4SI V8HI V16QI V4SF])
190 ;; Vec modes for move/logical/permute ops, include vector types for move not
191 ;; otherwise handled by altivec (v2df, v2di, ti)
192 (define_mode_iterator VM [V4SI
200 (KF "FLOAT128_VECTOR_P (KFmode)")
201 (TF "FLOAT128_VECTOR_P (TFmode)")])
203 ;; Like VM, except don't do TImode
204 (define_mode_iterator VM2 [V4SI
211 (KF "FLOAT128_VECTOR_P (KFmode)")
212 (TF "FLOAT128_VECTOR_P (TFmode)")])
214 ;; Map the Vector convert single precision to double precision for integer
215 ;; versus floating point
216 (define_mode_attr VS_sxwsp [(V4SI "sxw") (V4SF "sp")])
218 ;; Specific iterator for parity which does not have a byte/half-word form, but
219 ;; does have a quad word form
220 (define_mode_iterator VParity [V4SI
;; Per-element-mode attributes for the vector-integer iterators:
;; VI_char is the element-size letter spliced into mnemonics
;; (e.g. "vadds<VI_char>s" below), VI_scalar is the matching scalar
;; element mode, and VI_unit is the target predicate enabling the
;; pattern for that mode.  Note V2DI requires the Power8 vector unit
;; rather than base AltiVec.
225 (define_mode_attr VI_char [(V2DI "d") (V4SI "w") (V8HI "h") (V16QI "b")])
226 (define_mode_attr VI_scalar [(V2DI "DI") (V4SI "SI") (V8HI "HI") (V16QI "QI")])
227 (define_mode_attr VI_unit [(V16QI "VECTOR_UNIT_ALTIVEC_P (V16QImode)")
228 (V8HI "VECTOR_UNIT_ALTIVEC_P (V8HImode)")
229 (V4SI "VECTOR_UNIT_ALTIVEC_P (V4SImode)")
230 (V2DI "VECTOR_UNIT_P8_VECTOR_P (V2DImode)")
231 (V1TI "VECTOR_UNIT_ALTIVEC_P (V1TImode)")])
233 ;; Vector pack/unpack
234 (define_mode_iterator VP [V2DI V4SI V8HI])
;; For each VP mode: the mode with twice as many elements of half the
;; width (upper- and lower-case spellings), plus a size letter —
;; presumably for the unpack mnemonics; the using patterns are not
;; visible in this chunk, so confirm against them.
235 (define_mode_attr VP_small [(V2DI "V4SI") (V4SI "V8HI") (V8HI "V16QI")])
236 (define_mode_attr VP_small_lc [(V2DI "v4si") (V4SI "v8hi") (V8HI "v16qi")])
237 (define_mode_attr VU_char [(V2DI "w") (V4SI "h") (V8HI "b")])
240 (define_mode_iterator VNEG [V4SI V2DI])
242 ;; Vector move instructions.
243 (define_insn "*altivec_mov<mode>"
244 [(set (match_operand:VM2 0 "nonimmediate_operand" "=Z,v,v,?Y,?*r,?*r,v,v,?*r")
245 (match_operand:VM2 1 "input_operand" "v,Z,v,*r,Y,*r,j,W,W"))]
246 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)
247 && (register_operand (operands[0], <MODE>mode)
248 || register_operand (operands[1], <MODE>mode))"
257 * return output_vec_const_move (operands);
259 [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*,*")
260 (set_attr "length" "*,*,*,20,20,20,*,8,32")])
262 ;; Unlike other altivec moves, allow the GPRs, since a normal use of TImode
263 ;; is for unions. However for plain data movement, slightly favor the vector
265 (define_insn "*altivec_movti"
266 [(set (match_operand:TI 0 "nonimmediate_operand" "=Z,v,v,?Y,?r,?r,v,v")
267 (match_operand:TI 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
268 "VECTOR_MEM_ALTIVEC_P (TImode)
269 && (register_operand (operands[0], TImode)
270 || register_operand (operands[1], TImode))"
279 * return output_vec_const_move (operands);"
280 [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*")])
282 ;; Load up a vector with the most significant bit set by loading up -1 and
283 ;; doing a shift left
285 [(set (match_operand:VM 0 "altivec_register_operand")
286 (match_operand:VM 1 "easy_vector_constant_msb"))]
287 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
290 rtx dest = operands[0];
295 switch (easy_altivec_constant (operands[1], <MODE>mode))
309 if (mode != <MODE>mode)
310 dest = gen_lowpart (mode, dest);
312 num_elements = GET_MODE_NUNITS (mode);
313 v = rtvec_alloc (num_elements);
314 for (i = 0; i < num_elements; i++)
315 RTVEC_ELT (v, i) = constm1_rtx;
317 rs6000_expand_vector_init (dest, gen_rtx_PARALLEL (mode, v));
318 emit_insn (gen_rtx_SET (dest, gen_rtx_ASHIFT (mode, dest, dest)));
323 [(set (match_operand:VM 0 "altivec_register_operand")
324 (match_operand:VM 1 "easy_vector_constant_add_self"))]
325 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
326 [(set (match_dup 0) (match_dup 3))
327 (set (match_dup 0) (match_dup 4))]
329 rtx dup = gen_easy_altivec_constant (operands[1]);
331 machine_mode op_mode = <MODE>mode;
333 /* Divide the operand of the resulting VEC_DUPLICATE, and use
334 simplify_rtx to make a CONST_VECTOR. */
335 XEXP (dup, 0) = simplify_const_binary_operation (ASHIFTRT, QImode,
336 XEXP (dup, 0), const1_rtx);
337 const_vec = simplify_rtx (dup);
339 if (op_mode == V4SFmode)
342 operands[0] = gen_lowpart (op_mode, operands[0]);
344 if (GET_MODE (const_vec) == op_mode)
345 operands[3] = const_vec;
347 operands[3] = gen_lowpart (op_mode, const_vec);
348 operands[4] = gen_rtx_PLUS (op_mode, operands[0], operands[0]);
352 [(set (match_operand:VM 0 "altivec_register_operand")
353 (match_operand:VM 1 "easy_vector_constant_vsldoi"))]
354 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && can_create_pseudo_p ()"
355 [(set (match_dup 2) (match_dup 3))
356 (set (match_dup 4) (match_dup 5))
358 (unspec:VM [(match_dup 2)
363 rtx op1 = operands[1];
364 int elt = (BYTES_BIG_ENDIAN) ? 0 : GET_MODE_NUNITS (<MODE>mode) - 1;
365 HOST_WIDE_INT val = const_vector_elt_as_int (op1, elt);
366 rtx rtx_val = GEN_INT (val);
367 int shift = vspltis_shifted (op1);
369 gcc_assert (shift != 0);
370 operands[2] = gen_reg_rtx (<MODE>mode);
371 operands[3] = gen_const_vec_duplicate (<MODE>mode, rtx_val);
372 operands[4] = gen_reg_rtx (<MODE>mode);
376 operands[5] = CONSTM1_RTX (<MODE>mode);
377 operands[6] = GEN_INT (-shift);
381 operands[5] = CONST0_RTX (<MODE>mode);
382 operands[6] = GEN_INT (shift);
386 (define_insn_and_split "sldoi_to_mov<mode>"
387 [(set (match_operand:VM 0 "altivec_register_operand")
388 (unspec:VM [(match_operand:VM 1 "const_vector_each_byte_same")
390 (match_operand:QI 2 "u5bit_cint_operand")]
392 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode) && can_create_pseudo_p ()"
395 [(set (match_dup 0) (match_dup 1))]
397 if (!easy_vector_constant (operands[1], <MODE>mode))
399 rtx dest = gen_reg_rtx (<MODE>mode);
400 emit_move_insn (dest, operands[1]);
405 (define_insn "get_vrsave_internal"
406 [(set (match_operand:SI 0 "register_operand" "=r")
407 (unspec:SI [(reg:SI VRSAVE_REGNO)] UNSPEC_GET_VRSAVE))]
411 return "mfspr %0,256";
413 return "mfvrsave %0";
415 [(set_attr "type" "*")])
417 (define_insn "*set_vrsave_internal"
418 [(match_parallel 0 "vrsave_operation"
419 [(set (reg:SI VRSAVE_REGNO)
420 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r")
421 (reg:SI VRSAVE_REGNO)] UNSPECV_SET_VRSAVE))])]
425 return "mtspr 256,%1";
427 return "mtvrsave %1";
429 [(set_attr "type" "*")])
431 (define_insn "*save_world"
432 [(match_parallel 0 "save_world_operation"
433 [(clobber (reg:SI LR_REGNO))
434 (use (match_operand:SI 1 "call_operand" "s"))])]
435 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
437 [(set_attr "type" "branch")])
439 (define_insn "*restore_world"
440 [(match_parallel 0 "restore_world_operation"
442 (use (match_operand:SI 1 "call_operand" "s"))
443 (clobber (match_operand:SI 2 "gpc_reg_operand" "=r"))])]
444 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
447 ;; The save_vregs and restore_vregs patterns don't use memory_operand
448 ;; because (plus (reg) (const_int)) is not a valid vector address.
449 ;; This way is more compact than describing exactly what happens in
450 ;; the out-of-line functions, ie. loading the constant into r11/r12
451 ;; then using indexed addressing, and requires less editing of rtl
452 ;; to describe the operation to dwarf2out_frame_debug_expr.
453 (define_insn "*save_vregs_<mode>_r11"
454 [(match_parallel 0 "any_parallel_operand"
455 [(clobber (reg:P LR_REGNO))
456 (use (match_operand:P 1 "symbol_ref_operand" "s"))
459 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
460 (match_operand:P 3 "short_cint_operand" "I")))
461 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
464 [(set_attr "type" "branch")])
466 (define_insn "*save_vregs_<mode>_r12"
467 [(match_parallel 0 "any_parallel_operand"
468 [(clobber (reg:P LR_REGNO))
469 (use (match_operand:P 1 "symbol_ref_operand" "s"))
472 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
473 (match_operand:P 3 "short_cint_operand" "I")))
474 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
477 [(set_attr "type" "branch")])
479 (define_insn "*restore_vregs_<mode>_r11"
480 [(match_parallel 0 "any_parallel_operand"
481 [(clobber (reg:P LR_REGNO))
482 (use (match_operand:P 1 "symbol_ref_operand" "s"))
485 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
486 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
487 (match_operand:P 4 "short_cint_operand" "I"))))])]
490 [(set_attr "type" "branch")])
492 (define_insn "*restore_vregs_<mode>_r12"
493 [(match_parallel 0 "any_parallel_operand"
494 [(clobber (reg:P LR_REGNO))
495 (use (match_operand:P 1 "symbol_ref_operand" "s"))
498 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
499 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
500 (match_operand:P 4 "short_cint_operand" "I"))))])]
503 [(set_attr "type" "branch")])
505 ;; Simple binary operations.
508 (define_insn "add<mode>3"
509 [(set (match_operand:VI2 0 "register_operand" "=v")
510 (plus:VI2 (match_operand:VI2 1 "register_operand" "v")
511 (match_operand:VI2 2 "register_operand" "v")))]
513 "vaddu<VI_char>m %0,%1,%2"
514 [(set_attr "type" "vecsimple")])
516 (define_insn "*altivec_addv4sf3"
517 [(set (match_operand:V4SF 0 "register_operand" "=v")
518 (plus:V4SF (match_operand:V4SF 1 "register_operand" "v")
519 (match_operand:V4SF 2 "register_operand" "v")))]
520 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
522 [(set_attr "type" "vecfloat")])
524 (define_insn "altivec_vaddcuw"
525 [(set (match_operand:V4SI 0 "register_operand" "=v")
526 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
527 (match_operand:V4SI 2 "register_operand" "v")]
529 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
531 [(set_attr "type" "vecsimple")])
533 (define_insn "altivec_vaddu<VI_char>s"
534 [(set (match_operand:VI 0 "register_operand" "=v")
535 (us_plus:VI (match_operand:VI 1 "register_operand" "v")
536 (match_operand:VI 2 "register_operand" "v")))
537 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
539 "vaddu<VI_char>s %0,%1,%2"
540 [(set_attr "type" "vecsimple")])
;; Vector add, signed saturating (vaddsbs/vaddshs/vaddsws via
;; <VI_char>).  Besides the ss_plus result, the pattern records a write
;; to the VSCR (SAT can be set on saturation), modeled as an unspec
;; store to VSCR_REGNO so the status side effect is not optimized away.
542 (define_insn "altivec_vadds<VI_char>s"
543 [(set (match_operand:VI 0 "register_operand" "=v")
544 (ss_plus:VI (match_operand:VI 1 "register_operand" "v")
545 (match_operand:VI 2 "register_operand" "v")))
546 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
547 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
548 "vadds<VI_char>s %0,%1,%2"
549 [(set_attr "type" "vecsimple")])
552 (define_insn "sub<mode>3"
553 [(set (match_operand:VI2 0 "register_operand" "=v")
554 (minus:VI2 (match_operand:VI2 1 "register_operand" "v")
555 (match_operand:VI2 2 "register_operand" "v")))]
557 "vsubu<VI_char>m %0,%1,%2"
558 [(set_attr "type" "vecsimple")])
560 (define_insn "*altivec_subv4sf3"
561 [(set (match_operand:V4SF 0 "register_operand" "=v")
562 (minus:V4SF (match_operand:V4SF 1 "register_operand" "v")
563 (match_operand:V4SF 2 "register_operand" "v")))]
564 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
566 [(set_attr "type" "vecfloat")])
568 (define_insn "altivec_vsubcuw"
569 [(set (match_operand:V4SI 0 "register_operand" "=v")
570 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
571 (match_operand:V4SI 2 "register_operand" "v")]
573 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
575 [(set_attr "type" "vecsimple")])
;; Vector subtract, unsigned saturating (vsububs/vsubuhs/vsubuws via
;; <VI_char>).  As with the saturating adds, the VSCR write (SAT bit)
;; is modeled as an unspec store to VSCR_REGNO.
577 (define_insn "altivec_vsubu<VI_char>s"
578 [(set (match_operand:VI 0 "register_operand" "=v")
579 (us_minus:VI (match_operand:VI 1 "register_operand" "v")
580 (match_operand:VI 2 "register_operand" "v")))
581 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
582 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
583 "vsubu<VI_char>s %0,%1,%2"
584 [(set_attr "type" "vecsimple")])
;; Vector subtract, signed saturating (vsubsbs/vsubshs/vsubsws via
;; <VI_char>), with the same modeled VSCR (SAT) side effect as the
;; other saturating arithmetic patterns above.
586 (define_insn "altivec_vsubs<VI_char>s"
587 [(set (match_operand:VI 0 "register_operand" "=v")
588 (ss_minus:VI (match_operand:VI 1 "register_operand" "v")
589 (match_operand:VI 2 "register_operand" "v")))
590 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
591 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
592 "vsubs<VI_char>s %0,%1,%2"
593 [(set_attr "type" "vecsimple")])
596 (define_insn "uavg<mode>3_ceil"
597 [(set (match_operand:VI 0 "register_operand" "=v")
598 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
599 (match_operand:VI 2 "register_operand" "v")]
602 "vavgu<VI_char> %0,%1,%2"
603 [(set_attr "type" "vecsimple")])
605 (define_insn "avg<mode>3_ceil"
606 [(set (match_operand:VI 0 "register_operand" "=v")
607 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
608 (match_operand:VI 2 "register_operand" "v")]
610 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
611 "vavgs<VI_char> %0,%1,%2"
612 [(set_attr "type" "vecsimple")])
614 (define_insn "altivec_vcmpbfp"
615 [(set (match_operand:V4SI 0 "register_operand" "=v")
616 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
617 (match_operand:V4SF 2 "register_operand" "v")]
619 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
621 [(set_attr "type" "veccmp")])
623 (define_insn "altivec_eqv1ti"
624 [(set (match_operand:V1TI 0 "altivec_register_operand" "=v")
625 (eq:V1TI (match_operand:V1TI 1 "altivec_register_operand" "v")
626 (match_operand:V1TI 2 "altivec_register_operand" "v")))]
629 [(set_attr "type" "veccmpfx")])
631 (define_insn "altivec_eq<mode>"
632 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
633 (eq:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
634 (match_operand:VI2 2 "altivec_register_operand" "v")))]
636 "vcmpequ<VI_char> %0,%1,%2"
637 [(set_attr "type" "veccmpfx")])
639 (define_insn "*altivec_gt<mode>"
640 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
641 (gt:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
642 (match_operand:VI2 2 "altivec_register_operand" "v")))]
644 "vcmpgts<VI_char> %0,%1,%2"
645 [(set_attr "type" "veccmpfx")])
647 (define_insn "*altivec_gtv1ti"
648 [(set (match_operand:V1TI 0 "altivec_register_operand" "=v")
649 (gt:V1TI (match_operand:V1TI 1 "altivec_register_operand" "v")
650 (match_operand:V1TI 2 "altivec_register_operand" "v")))]
653 [(set_attr "type" "veccmpfx")])
655 (define_insn "*altivec_gtu<mode>"
656 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
657 (gtu:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
658 (match_operand:VI2 2 "altivec_register_operand" "v")))]
660 "vcmpgtu<VI_char> %0,%1,%2"
661 [(set_attr "type" "veccmpfx")])
663 (define_insn "*altivec_gtuv1ti"
664 [(set (match_operand:V1TI 0 "altivec_register_operand" "=v")
665 (gtu:V1TI (match_operand:V1TI 1 "altivec_register_operand" "v")
666 (match_operand:V1TI 2 "altivec_register_operand" "v")))]
669 [(set_attr "type" "veccmpfx")])
671 (define_insn "*altivec_eqv4sf"
672 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
673 (eq:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
674 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
675 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
677 [(set_attr "type" "veccmp")])
679 (define_insn "*altivec_gtv4sf"
680 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
681 (gt:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
682 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
683 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
685 [(set_attr "type" "veccmp")])
687 (define_insn "*altivec_gev4sf"
688 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
689 (ge:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
690 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
691 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
693 [(set_attr "type" "veccmp")])
695 (define_insn "altivec_vsel<mode>"
696 [(set (match_operand:VM 0 "register_operand" "=wa,v")
699 (not:VM (match_operand:VM 3 "register_operand" "wa,v"))
700 (match_operand:VM 1 "register_operand" "wa,v"))
703 (match_operand:VM 2 "register_operand" "wa,v"))))]
704 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode)"
706 xxsel %x0,%x1,%x2,%x3
708 [(set_attr "type" "vecmove")
709 (set_attr "isa" "<VSisa>")])
711 (define_insn "altivec_vsel<mode>2"
712 [(set (match_operand:VM 0 "register_operand" "=wa,v")
715 (not:VM (match_operand:VM 3 "register_operand" "wa,v"))
716 (match_operand:VM 1 "register_operand" "wa,v"))
718 (match_operand:VM 2 "register_operand" "wa,v")
720 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode)"
722 xxsel %x0,%x1,%x2,%x3
724 [(set_attr "type" "vecmove")
725 (set_attr "isa" "<VSisa>")])
727 (define_insn "altivec_vsel<mode>3"
728 [(set (match_operand:VM 0 "register_operand" "=wa,v")
731 (match_operand:VM 3 "register_operand" "wa,v")
732 (match_operand:VM 1 "register_operand" "wa,v"))
734 (not:VM (match_dup 3))
735 (match_operand:VM 2 "register_operand" "wa,v"))))]
736 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode)"
738 xxsel %x0,%x2,%x1,%x3
740 [(set_attr "type" "vecmove")
741 (set_attr "isa" "<VSisa>")])
743 (define_insn "altivec_vsel<mode>4"
744 [(set (match_operand:VM 0 "register_operand" "=wa,v")
747 (match_operand:VM 1 "register_operand" "wa,v")
748 (match_operand:VM 3 "register_operand" "wa,v"))
750 (not:VM (match_dup 3))
751 (match_operand:VM 2 "register_operand" "wa,v"))))]
752 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode)"
754 xxsel %x0,%x2,%x1,%x3
756 [(set_attr "type" "vecmove")
757 (set_attr "isa" "<VSisa>")])
759 ;; Fused multiply add.
;; Fused multiply-add on V4SF: operand0 = operand1 * operand2 +
;; operand3, emitted as vmaddfp.  The leading '*' means this pattern
;; is matched only, never generated by name.
761 (define_insn "*altivec_fmav4sf4"
762 [(set (match_operand:V4SF 0 "register_operand" "=v")
763 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
764 (match_operand:V4SF 2 "register_operand" "v")
765 (match_operand:V4SF 3 "register_operand" "v")))]
766 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
767 "vmaddfp %0,%1,%2,%3"
768 [(set_attr "type" "vecfloat")])
770 ;; We do multiply as a fused multiply-add with an add of a -0.0 vector.
772 (define_expand "altivec_mulv4sf3"
773 [(set (match_operand:V4SF 0 "register_operand")
774 (fma:V4SF (match_operand:V4SF 1 "register_operand")
775 (match_operand:V4SF 2 "register_operand")
777 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
781 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
782 neg0 = gen_reg_rtx (V4SImode);
783 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
784 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
786 operands[3] = gen_lowpart (V4SFmode, neg0);
789 ;; 32-bit integer multiplication
790 ;; A_high = Operand_0 & 0xFFFF0000 >> 16
791 ;; A_low = Operand_0 & 0xFFFF
792 ;; B_high = Operand_1 & 0xFFFF0000 >> 16
793 ;; B_low = Operand_1 & 0xFFFF
794 ;; result = A_low * B_low + (A_high * B_low + B_high * A_low) << 16
796 ;; (define_insn "mulv4si3"
797 ;; [(set (match_operand:V4SI 0 "register_operand" "=v")
798 ;; (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
799 ;; (match_operand:V4SI 2 "register_operand" "v")))]
800 (define_insn "mulv4si3_p8"
801 [(set (match_operand:V4SI 0 "register_operand" "=v")
802 (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
803 (match_operand:V4SI 2 "register_operand" "v")))]
806 [(set_attr "type" "veccomplex")])
808 (define_expand "mulv4si3"
809 [(use (match_operand:V4SI 0 "register_operand"))
810 (use (match_operand:V4SI 1 "register_operand"))
811 (use (match_operand:V4SI 2 "register_operand"))]
823 if (TARGET_P8_VECTOR)
825 emit_insn (gen_mulv4si3_p8 (operands[0], operands[1], operands[2]));
829 zero = gen_reg_rtx (V4SImode);
830 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
832 sixteen = gen_reg_rtx (V4SImode);
833 emit_insn (gen_altivec_vspltisw (sixteen, gen_rtx_CONST_INT (V4SImode, -16)));
835 swap = gen_reg_rtx (V4SImode);
836 emit_insn (gen_vrotlv4si3 (swap, operands[2], sixteen));
838 one = gen_reg_rtx (V8HImode);
839 convert_move (one, operands[1], 0);
841 two = gen_reg_rtx (V8HImode);
842 convert_move (two, operands[2], 0);
844 small_swap = gen_reg_rtx (V8HImode);
845 convert_move (small_swap, swap, 0);
847 low_product = gen_reg_rtx (V4SImode);
848 emit_insn (gen_altivec_vmulouh (low_product, one, two));
850 high_product = gen_reg_rtx (V4SImode);
851 emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero));
853 emit_insn (gen_vashlv4si3 (high_product, high_product, sixteen));
855 emit_insn (gen_addv4si3 (operands[0], high_product, low_product));
860 (define_expand "mulv8hi3"
861 [(use (match_operand:V8HI 0 "register_operand"))
862 (use (match_operand:V8HI 1 "register_operand"))
863 (use (match_operand:V8HI 2 "register_operand"))]
866 rtx zero = gen_reg_rtx (V8HImode);
868 emit_insn (gen_altivec_vspltish (zero, const0_rtx));
869 emit_insn (gen_fmav8hi4 (operands[0], operands[1], operands[2], zero));
874 ;; Map UNSPEC_SLDB to "l" and UNSPEC_SRDB to "r".
875 (define_int_attr SLDB_lr [(UNSPEC_SLDB "l")
878 (define_int_iterator VSHIFT_DBL_LR [UNSPEC_SLDB UNSPEC_SRDB])
880 (define_insn "vs<SLDB_lr>db_<mode>"
881 [(set (match_operand:VEC_IC 0 "register_operand" "=v")
882 (unspec:VEC_IC [(match_operand:VEC_IC 1 "register_operand" "v")
883 (match_operand:VEC_IC 2 "register_operand" "v")
884 (match_operand:QI 3 "const_0_to_12_operand" "n")]
887 "vs<SLDB_lr>dbi %0,%1,%2,%3"
888 [(set_attr "type" "vecsimple")])
890 (define_expand "vstrir_<mode>"
891 [(set (match_operand:VIshort 0 "altivec_register_operand")
892 (unspec:VIshort [(match_operand:VIshort 1 "altivec_register_operand")]
896 if (BYTES_BIG_ENDIAN)
897 emit_insn (gen_vstrir_direct_<mode> (operands[0], operands[1]));
899 emit_insn (gen_vstril_direct_<mode> (operands[0], operands[1]));
903 (define_insn "vstrir_direct_<mode>"
904 [(set (match_operand:VIshort 0 "altivec_register_operand" "=v")
906 [(match_operand:VIshort 1 "altivec_register_operand" "v")]
910 [(set_attr "type" "vecsimple")])
912 ;; This expands into same code as vstrir_<mode> followed by condition logic
913 ;; so that a single vstribr. or vstrihr. or vstribl. or vstrihl. instruction
914 ;; can, for example, satisfy the needs of a vec_strir () function paired
915 ;; with a vec_strir_p () function if both take the same incoming arguments.
916 (define_expand "vstrir_p_<mode>"
917 [(match_operand:SI 0 "gpc_reg_operand")
918 (match_operand:VIshort 1 "altivec_register_operand")]
921 rtx scratch = gen_reg_rtx (<MODE>mode);
922 if (BYTES_BIG_ENDIAN)
923 emit_insn (gen_vstrir_p_direct_<mode> (scratch, operands[1]));
925 emit_insn (gen_vstril_p_direct_<mode> (scratch, operands[1]));
926 emit_insn (gen_cr6_test_for_zero (operands[0]));
930 (define_insn "vstrir_p_direct_<mode>"
931 [(set (match_operand:VIshort 0 "altivec_register_operand" "=v")
933 [(match_operand:VIshort 1 "altivec_register_operand" "v")]
935 (set (reg:CC CR6_REGNO)
936 (unspec:CC [(match_dup 1)]
940 [(set_attr "type" "vecsimple")])
942 (define_expand "vstril_<mode>"
943 [(set (match_operand:VIshort 0 "altivec_register_operand")
944 (unspec:VIshort [(match_operand:VIshort 1 "altivec_register_operand")]
948 if (BYTES_BIG_ENDIAN)
949 emit_insn (gen_vstril_direct_<mode> (operands[0], operands[1]));
951 emit_insn (gen_vstrir_direct_<mode> (operands[0], operands[1]));
955 (define_insn "vstril_direct_<mode>"
956 [(set (match_operand:VIshort 0 "altivec_register_operand" "=v")
958 [(match_operand:VIshort 1 "altivec_register_operand" "v")]
962 [(set_attr "type" "vecsimple")])
964 ;; This expands into same code as vstril_<mode> followed by condition logic
965 ;; so that a single vstribr. or vstrihr. or vstribl. or vstrihl. instruction
966 ;; can, for example, satisfy the needs of a vec_stril () function paired
967 ;; with a vec_stril_p () function if both take the same incoming arguments.
968 (define_expand "vstril_p_<mode>"
969 [(match_operand:SI 0 "gpc_reg_operand")
970 (match_operand:VIshort 1 "altivec_register_operand")]
973 rtx scratch = gen_reg_rtx (<MODE>mode);
974 if (BYTES_BIG_ENDIAN)
975 emit_insn (gen_vstril_p_direct_<mode> (scratch, operands[1]));
977 emit_insn (gen_vstrir_p_direct_<mode> (scratch, operands[1]));
978 emit_insn (gen_cr6_test_for_zero (operands[0]));
982 (define_insn "vstril_p_direct_<mode>"
983 [(set (match_operand:VIshort 0 "altivec_register_operand" "=v")
985 [(match_operand:VIshort 1 "altivec_register_operand" "v")]
987 (set (reg:CC CR6_REGNO)
988 (unspec:CC [(match_dup 1)]
992 [(set_attr "type" "vecsimple")])
994 ;; Fused multiply subtract
995 (define_insn "*altivec_vnmsubfp"
996 [(set (match_operand:V4SF 0 "register_operand" "=v")
998 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
999 (match_operand:V4SF 2 "register_operand" "v")
1001 (match_operand:V4SF 3 "register_operand" "v")))))]
1002 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1003 "vnmsubfp %0,%1,%2,%3"
1004 [(set_attr "type" "vecfloat")])
1006 (define_insn "altivec_vmsumu<VI_char>m"
1007 [(set (match_operand:V4SI 0 "register_operand" "=v")
1008 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1009 (match_operand:VIshort 2 "register_operand" "v")
1010 (match_operand:V4SI 3 "register_operand" "v")]
1013 "vmsumu<VI_char>m %0,%1,%2,%3"
1014 [(set_attr "type" "veccomplex")])
1016 (define_insn "altivec_vmsumudm"
1017 [(set (match_operand:V1TI 0 "register_operand" "=v")
1018 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1019 (match_operand:V2DI 2 "register_operand" "v")
1020 (match_operand:V1TI 3 "register_operand" "v")]
1023 "vmsumudm %0,%1,%2,%3"
1024 [(set_attr "type" "veccomplex")])
1026 (define_insn "altivec_vmsumm<VI_char>m"
1027 [(set (match_operand:V4SI 0 "register_operand" "=v")
1028 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1029 (match_operand:VIshort 2 "register_operand" "v")
1030 (match_operand:V4SI 3 "register_operand" "v")]
1033 "vmsumm<VI_char>m %0,%1,%2,%3"
1034 [(set_attr "type" "veccomplex")])
1036 (define_insn "altivec_vmsumshm"
1037 [(set (match_operand:V4SI 0 "register_operand" "=v")
1038 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1039 (match_operand:V8HI 2 "register_operand" "v")
1040 (match_operand:V4SI 3 "register_operand" "v")]
1043 "vmsumshm %0,%1,%2,%3"
1044 [(set_attr "type" "veccomplex")])
1046 (define_insn "altivec_vmsumuhs"
1047 [(set (match_operand:V4SI 0 "register_operand" "=v")
1048 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1049 (match_operand:V8HI 2 "register_operand" "v")
1050 (match_operand:V4SI 3 "register_operand" "v")]
1052 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1054 "vmsumuhs %0,%1,%2,%3"
1055 [(set_attr "type" "veccomplex")])
1057 (define_insn "altivec_vmsumshs"
1058 [(set (match_operand:V4SI 0 "register_operand" "=v")
1059 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1060 (match_operand:V8HI 2 "register_operand" "v")
1061 (match_operand:V4SI 3 "register_operand" "v")]
1063 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1065 "vmsumshs %0,%1,%2,%3"
1066 [(set_attr "type" "veccomplex")])
1070 (define_insn "umax<mode>3"
1071 [(set (match_operand:VI2 0 "register_operand" "=v")
1072 (umax:VI2 (match_operand:VI2 1 "register_operand" "v")
1073 (match_operand:VI2 2 "register_operand" "v")))]
1075 "vmaxu<VI_char> %0,%1,%2"
1076 [(set_attr "type" "vecsimple")])
1078 (define_insn "smax<mode>3"
1079 [(set (match_operand:VI2 0 "register_operand" "=v")
1080 (smax:VI2 (match_operand:VI2 1 "register_operand" "v")
1081 (match_operand:VI2 2 "register_operand" "v")))]
1083 "vmaxs<VI_char> %0,%1,%2"
1084 [(set_attr "type" "vecsimple")])
1086 (define_insn "*altivec_smaxv4sf3"
1087 [(set (match_operand:V4SF 0 "register_operand" "=v")
1088 (smax:V4SF (match_operand:V4SF 1 "register_operand" "v")
1089 (match_operand:V4SF 2 "register_operand" "v")))]
1090 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1092 [(set_attr "type" "veccmp")])
1094 (define_insn "umin<mode>3"
1095 [(set (match_operand:VI2 0 "register_operand" "=v")
1096 (umin:VI2 (match_operand:VI2 1 "register_operand" "v")
1097 (match_operand:VI2 2 "register_operand" "v")))]
1099 "vminu<VI_char> %0,%1,%2"
1100 [(set_attr "type" "vecsimple")])
1102 (define_insn "smin<mode>3"
1103 [(set (match_operand:VI2 0 "register_operand" "=v")
1104 (smin:VI2 (match_operand:VI2 1 "register_operand" "v")
1105 (match_operand:VI2 2 "register_operand" "v")))]
1107 "vmins<VI_char> %0,%1,%2"
1108 [(set_attr "type" "vecsimple")])
1110 (define_insn "*altivec_sminv4sf3"
1111 [(set (match_operand:V4SF 0 "register_operand" "=v")
1112 (smin:V4SF (match_operand:V4SF 1 "register_operand" "v")
1113 (match_operand:V4SF 2 "register_operand" "v")))]
1114 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1116 [(set_attr "type" "veccmp")])
1118 (define_insn "altivec_vmhaddshs"
1119 [(set (match_operand:V8HI 0 "register_operand" "=v")
1120 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1121 (match_operand:V8HI 2 "register_operand" "v")
1122 (match_operand:V8HI 3 "register_operand" "v")]
1124 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1126 "vmhaddshs %0,%1,%2,%3"
1127 [(set_attr "type" "veccomplex")])
1129 (define_insn "altivec_vmhraddshs"
1130 [(set (match_operand:V8HI 0 "register_operand" "=v")
1131 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1132 (match_operand:V8HI 2 "register_operand" "v")
1133 (match_operand:V8HI 3 "register_operand" "v")]
1135 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1137 "vmhraddshs %0,%1,%2,%3"
1138 [(set_attr "type" "veccomplex")])
1140 (define_insn "fmav8hi4"
1141 [(set (match_operand:V8HI 0 "register_operand" "=v")
1142 (plus:V8HI (mult:V8HI (match_operand:V8HI 1 "register_operand" "v")
1143 (match_operand:V8HI 2 "register_operand" "v"))
1144 (match_operand:V8HI 3 "register_operand" "v")))]
1146 "vmladduhm %0,%1,%2,%3"
1147 [(set_attr "type" "veccomplex")])
;; Merge-high byte.  The expander picks the endian-correct implementation:
;; on little-endian, merge-high is done by the merge-LOW pattern with the
;; two inputs swapped (gen_altivec_vmrglb_direct_le with operands 2,1).
1149 (define_expand "altivec_vmrghb"
1150 [(use (match_operand:V16QI 0 "register_operand"))
1151 (use (match_operand:V16QI 1 "register_operand"))
1152 (use (match_operand:V16QI 2 "register_operand"))]
1155 if (BYTES_BIG_ENDIAN)
1157 gen_altivec_vmrghb_direct_be (operands[0], operands[1], operands[2]));
1160 gen_altivec_vmrglb_direct_le (operands[0], operands[2], operands[1]));
;; vmrghb on big-endian: selects element pairs (0,16)(1,17)...(7,23) of the
;; 32-byte concat of the inputs, i.e. interleaves the high 8 bytes of each.
;; NOTE(review): the vec_select/vec_concat wrapper and asm-template lines
;; appear to be missing from this extraction — confirm against upstream.
1164 (define_insn "altivec_vmrghb_direct_be"
1165 [(set (match_operand:V16QI 0 "register_operand" "=v")
1168 (match_operand:V16QI 1 "register_operand" "v")
1169 (match_operand:V16QI 2 "register_operand" "v"))
1170 (parallel [(const_int 0) (const_int 16)
1171 (const_int 1) (const_int 17)
1172 (const_int 2) (const_int 18)
1173 (const_int 3) (const_int 19)
1174 (const_int 4) (const_int 20)
1175 (const_int 5) (const_int 21)
1176 (const_int 6) (const_int 22)
1177 (const_int 7) (const_int 23)])))]
1178 "TARGET_ALTIVEC && BYTES_BIG_ENDIAN"
1180 [(set_attr "type" "vecperm")])
;; vmrghb as emitted on little-endian: operand order inside the concat is
;; swapped (2 before 1) and the high-numbered lanes 8..15/24..31 selected,
;; which yields the merge-high result in LE element numbering.
1182 (define_insn "altivec_vmrghb_direct_le"
1183 [(set (match_operand:V16QI 0 "register_operand" "=v")
1186 (match_operand:V16QI 2 "register_operand" "v")
1187 (match_operand:V16QI 1 "register_operand" "v"))
1188 (parallel [(const_int 8) (const_int 24)
1189 (const_int 9) (const_int 25)
1190 (const_int 10) (const_int 26)
1191 (const_int 11) (const_int 27)
1192 (const_int 12) (const_int 28)
1193 (const_int 13) (const_int 29)
1194 (const_int 14) (const_int 30)
1195 (const_int 15) (const_int 31)])))]
1196 "TARGET_ALTIVEC && !BYTES_BIG_ENDIAN"
1198 [(set_attr "type" "vecperm")])
;; Merge-high halfword.  Same endian scheme as vmrghb: little-endian uses
;; the merge-low pattern with swapped inputs.
1200 (define_expand "altivec_vmrghh"
1201 [(use (match_operand:V8HI 0 "register_operand"))
1202 (use (match_operand:V8HI 1 "register_operand"))
1203 (use (match_operand:V8HI 2 "register_operand"))]
1206 if (BYTES_BIG_ENDIAN)
1208 gen_altivec_vmrghh_direct_be (operands[0], operands[1], operands[2]));
1211 gen_altivec_vmrglh_direct_le (operands[0], operands[2], operands[1]));
;; vmrghh on big-endian: interleave halfword pairs (0,8)(1,9)(2,10)(3,11)
;; from the concat of the two inputs.
1215 (define_insn "altivec_vmrghh_direct_be"
1216 [(set (match_operand:V8HI 0 "register_operand" "=v")
1219 (match_operand:V8HI 1 "register_operand" "v")
1220 (match_operand:V8HI 2 "register_operand" "v"))
1221 (parallel [(const_int 0) (const_int 8)
1222 (const_int 1) (const_int 9)
1223 (const_int 2) (const_int 10)
1224 (const_int 3) (const_int 11)])))]
1225 "TARGET_ALTIVEC && BYTES_BIG_ENDIAN"
1227 [(set_attr "type" "vecperm")])
;; vmrghh as emitted on little-endian: swapped concat, high lanes selected.
1229 (define_insn "altivec_vmrghh_direct_le"
1230 [(set (match_operand:V8HI 0 "register_operand" "=v")
1233 (match_operand:V8HI 2 "register_operand" "v")
1234 (match_operand:V8HI 1 "register_operand" "v"))
1235 (parallel [(const_int 4) (const_int 12)
1236 (const_int 5) (const_int 13)
1237 (const_int 6) (const_int 14)
1238 (const_int 7) (const_int 15)])))]
1239 "TARGET_ALTIVEC && !BYTES_BIG_ENDIAN"
1241 [(set_attr "type" "vecperm")])
;; Merge-high word.  The direct insns are iterated over VSX_W (V4SI/V4SF)
;; with both VSX ("wa") and AltiVec ("v") register alternatives; the
;; expander instantiates the V4SI variant explicitly.
1243 (define_expand "altivec_vmrghw"
1244 [(use (match_operand:V4SI 0 "register_operand"))
1245 (use (match_operand:V4SI 1 "register_operand"))
1246 (use (match_operand:V4SI 2 "register_operand"))]
1247 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1249 if (BYTES_BIG_ENDIAN)
1250 emit_insn (gen_altivec_vmrghw_direct_v4si_be (operands[0],
1254 emit_insn (gen_altivec_vmrglw_direct_v4si_le (operands[0],
;; vmrghw on big-endian: word pairs (0,4)(1,5) of the concat.
1260 (define_insn "altivec_vmrghw_direct_<mode>_be"
1261 [(set (match_operand:VSX_W 0 "register_operand" "=wa,v")
1263 (vec_concat:<VS_double>
1264 (match_operand:VSX_W 1 "register_operand" "wa,v")
1265 (match_operand:VSX_W 2 "register_operand" "wa,v"))
1266 (parallel [(const_int 0) (const_int 4)
1267 (const_int 1) (const_int 5)])))]
1268 "TARGET_ALTIVEC && BYTES_BIG_ENDIAN"
1272 [(set_attr "type" "vecperm")])
;; vmrghw as emitted on little-endian: swapped concat, words (2,6)(3,7).
1274 (define_insn "altivec_vmrghw_direct_<mode>_le"
1275 [(set (match_operand:VSX_W 0 "register_operand" "=wa,v")
1277 (vec_concat:<VS_double>
1278 (match_operand:VSX_W 2 "register_operand" "wa,v")
1279 (match_operand:VSX_W 1 "register_operand" "wa,v"))
1280 (parallel [(const_int 2) (const_int 6)
1281 (const_int 3) (const_int 7)])))]
1282 "TARGET_ALTIVEC && !BYTES_BIG_ENDIAN"
1286 [(set_attr "type" "vecperm")])
;; Merge-high for V4SF, combiner-only pattern (leading '*': no gen function).
;; The endian fixup is done in the asm output: LE emits vmrglw with the
;; operands swapped, which computes the same merge-high result.
1288 (define_insn "*altivec_vmrghsf"
1289 [(set (match_operand:V4SF 0 "register_operand" "=v")
1292 (match_operand:V4SF 1 "register_operand" "v")
1293 (match_operand:V4SF 2 "register_operand" "v"))
1294 (parallel [(const_int 0) (const_int 4)
1295 (const_int 1) (const_int 5)])))]
1296 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1298 if (BYTES_BIG_ENDIAN)
1299 return "vmrghw %0,%1,%2";
1301 return "vmrglw %0,%2,%1";
1303 [(set_attr "type" "vecperm")])
;; Merge-low byte.  Mirror image of altivec_vmrghb: on little-endian the
;; merge-HIGH pattern with swapped inputs is used.
1305 (define_expand "altivec_vmrglb"
1306 [(use (match_operand:V16QI 0 "register_operand"))
1307 (use (match_operand:V16QI 1 "register_operand"))
1308 (use (match_operand:V16QI 2 "register_operand"))]
1311 if (BYTES_BIG_ENDIAN)
1313 gen_altivec_vmrglb_direct_be (operands[0], operands[1], operands[2]));
1316 gen_altivec_vmrghb_direct_le (operands[0], operands[2], operands[1]));
;; vmrglb on big-endian: interleave the low 8 bytes of each input,
;; element pairs (8,24)(9,25)...(15,31) of the concat.
1320 (define_insn "altivec_vmrglb_direct_be"
1321 [(set (match_operand:V16QI 0 "register_operand" "=v")
1324 (match_operand:V16QI 1 "register_operand" "v")
1325 (match_operand:V16QI 2 "register_operand" "v"))
1326 (parallel [(const_int 8) (const_int 24)
1327 (const_int 9) (const_int 25)
1328 (const_int 10) (const_int 26)
1329 (const_int 11) (const_int 27)
1330 (const_int 12) (const_int 28)
1331 (const_int 13) (const_int 29)
1332 (const_int 14) (const_int 30)
1333 (const_int 15) (const_int 31)])))]
1334 "TARGET_ALTIVEC && BYTES_BIG_ENDIAN"
1336 [(set_attr "type" "vecperm")])
;; vmrglb as emitted on little-endian: swapped concat, low lanes 0..7/16..23.
1338 (define_insn "altivec_vmrglb_direct_le"
1339 [(set (match_operand:V16QI 0 "register_operand" "=v")
1342 (match_operand:V16QI 2 "register_operand" "v")
1343 (match_operand:V16QI 1 "register_operand" "v"))
1344 (parallel [(const_int 0) (const_int 16)
1345 (const_int 1) (const_int 17)
1346 (const_int 2) (const_int 18)
1347 (const_int 3) (const_int 19)
1348 (const_int 4) (const_int 20)
1349 (const_int 5) (const_int 21)
1350 (const_int 6) (const_int 22)
1351 (const_int 7) (const_int 23)])))]
1352 "TARGET_ALTIVEC && !BYTES_BIG_ENDIAN"
1354 [(set_attr "type" "vecperm")])
;; Merge-low halfword.  Mirror of altivec_vmrghh.
1356 (define_expand "altivec_vmrglh"
1357 [(use (match_operand:V8HI 0 "register_operand"))
1358 (use (match_operand:V8HI 1 "register_operand"))
1359 (use (match_operand:V8HI 2 "register_operand"))]
1362 if (BYTES_BIG_ENDIAN)
1364 gen_altivec_vmrglh_direct_be (operands[0], operands[1], operands[2]));
1367 gen_altivec_vmrghh_direct_le (operands[0], operands[2], operands[1]));
;; vmrglh on big-endian: halfword pairs (4,12)(5,13)(6,14)(7,15).
1371 (define_insn "altivec_vmrglh_direct_be"
1372 [(set (match_operand:V8HI 0 "register_operand" "=v")
1375 (match_operand:V8HI 1 "register_operand" "v")
1376 (match_operand:V8HI 2 "register_operand" "v"))
1377 (parallel [(const_int 4) (const_int 12)
1378 (const_int 5) (const_int 13)
1379 (const_int 6) (const_int 14)
1380 (const_int 7) (const_int 15)])))]
1381 "TARGET_ALTIVEC && BYTES_BIG_ENDIAN"
1383 [(set_attr "type" "vecperm")])
;; vmrglh as emitted on little-endian: swapped concat, pairs (0,8)..(3,11).
1385 (define_insn "altivec_vmrglh_direct_le"
1386 [(set (match_operand:V8HI 0 "register_operand" "=v")
1389 (match_operand:V8HI 2 "register_operand" "v")
1390 (match_operand:V8HI 1 "register_operand" "v"))
1391 (parallel [(const_int 0) (const_int 8)
1392 (const_int 1) (const_int 9)
1393 (const_int 2) (const_int 10)
1394 (const_int 3) (const_int 11)])))]
1395 "TARGET_ALTIVEC && !BYTES_BIG_ENDIAN"
1397 [(set_attr "type" "vecperm")])
;; Merge-low word.  Mirror of altivec_vmrghw; direct insns iterate VSX_W.
1399 (define_expand "altivec_vmrglw"
1400 [(use (match_operand:V4SI 0 "register_operand"))
1401 (use (match_operand:V4SI 1 "register_operand"))
1402 (use (match_operand:V4SI 2 "register_operand"))]
1403 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1405 if (BYTES_BIG_ENDIAN)
1406 emit_insn (gen_altivec_vmrglw_direct_v4si_be (operands[0],
1410 emit_insn (gen_altivec_vmrghw_direct_v4si_le (operands[0],
;; vmrglw on big-endian: word pairs (2,6)(3,7) of the concat.
1416 (define_insn "altivec_vmrglw_direct_<mode>_be"
1417 [(set (match_operand:VSX_W 0 "register_operand" "=wa,v")
1419 (vec_concat:<VS_double>
1420 (match_operand:VSX_W 1 "register_operand" "wa,v")
1421 (match_operand:VSX_W 2 "register_operand" "wa,v"))
1422 (parallel [(const_int 2) (const_int 6)
1423 (const_int 3) (const_int 7)])))]
1424 "TARGET_ALTIVEC && BYTES_BIG_ENDIAN"
1428 [(set_attr "type" "vecperm")])
;; vmrglw as emitted on little-endian: swapped concat, words (0,4)(1,5).
1430 (define_insn "altivec_vmrglw_direct_<mode>_le"
1431 [(set (match_operand:VSX_W 0 "register_operand" "=wa,v")
1433 (vec_concat:<VS_double>
1434 (match_operand:VSX_W 2 "register_operand" "wa,v")
1435 (match_operand:VSX_W 1 "register_operand" "wa,v"))
1436 (parallel [(const_int 0) (const_int 4)
1437 (const_int 1) (const_int 5)])))]
1438 "TARGET_ALTIVEC && !BYTES_BIG_ENDIAN"
1442 [(set_attr "type" "vecperm")])
;; Merge-low for V4SF, combiner-only pattern; LE emits vmrghw with the
;; operands swapped to get the merge-low result (mirror of *altivec_vmrghsf).
1444 (define_insn "*altivec_vmrglsf"
1445 [(set (match_operand:V4SF 0 "register_operand" "=v")
1448 (match_operand:V4SF 1 "register_operand" "v")
1449 (match_operand:V4SF 2 "register_operand" "v"))
1450 (parallel [(const_int 2) (const_int 6)
1451 (const_int 3) (const_int 7)])))]
1452 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1454 if (BYTES_BIG_ENDIAN)
1455 return "vmrglw %0,%1,%2";
1457 return "vmrghw %0,%2,%1";
1459 [(set_attr "type" "vecperm")])
;; Power8 vector merge two V2DF/V2DI even words to V2DF
;; Expander builds the vec_select RTL directly (elements 0 and 2 of the
;; doubled concat) and lets the named insn patterns match it.
1462 (define_expand "p8_vmrgew_<mode>"
1463 [(use (match_operand:VSX_D 0 "vsx_register_operand"))
1464 (use (match_operand:VSX_D 1 "vsx_register_operand"))
1465 (use (match_operand:VSX_D 2 "vsx_register_operand"))]
1466 "VECTOR_MEM_VSX_P (<MODE>mode)"
1471 v = gen_rtvec (2, GEN_INT (0), GEN_INT (2));
1472 x = gen_rtx_VEC_CONCAT (<VS_double>mode, operands[1], operands[2]);
1474 x = gen_rtx_VEC_SELECT (<MODE>mode, x, gen_rtx_PARALLEL (VOIDmode, v));
1475 emit_insn (gen_rtx_SET (operands[0], x));
;; Power8 vector merge two V4SF/V4SI even words to V4SF
;; LE fixup is in the asm output: vmrgow with swapped operands.
1480 (define_insn "p8_vmrgew_<mode>"
1481 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1483 (vec_concat:<VS_double>
1484 (match_operand:VSX_W 1 "register_operand" "v")
1485 (match_operand:VSX_W 2 "register_operand" "v"))
1486 (parallel [(const_int 0) (const_int 4)
1487 (const_int 2) (const_int 6)])))]
1490 if (BYTES_BIG_ENDIAN)
1491 return "vmrgew %0,%1,%2";
1493 return "vmrgow %0,%2,%1";
1495 [(set_attr "type" "vecperm")])
;; Merge odd words; LE emits vmrgew with swapped operands.
1497 (define_insn "p8_vmrgow_<mode>"
1498 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1500 (vec_concat:<VS_double>
1501 (match_operand:VSX_W 1 "register_operand" "v")
1502 (match_operand:VSX_W 2 "register_operand" "v"))
1503 (parallel [(const_int 1) (const_int 5)
1504 (const_int 3) (const_int 7)])))]
1507 if (BYTES_BIG_ENDIAN)
1508 return "vmrgow %0,%1,%2";
1510 return "vmrgew %0,%2,%1";
1512 [(set_attr "type" "vecperm")])
;; V2DF/V2DI odd-word merge expander: selects elements 1 and 3.
1514 (define_expand "p8_vmrgow_<mode>"
1515 [(use (match_operand:VSX_D 0 "vsx_register_operand"))
1516 (use (match_operand:VSX_D 1 "vsx_register_operand"))
1517 (use (match_operand:VSX_D 2 "vsx_register_operand"))]
1518 "VECTOR_MEM_VSX_P (<MODE>mode)"
1523 v = gen_rtvec (2, GEN_INT (1), GEN_INT (3));
1524 x = gen_rtx_VEC_CONCAT (<VS_double>mode, operands[1], operands[2]);
1526 x = gen_rtx_VEC_SELECT (<MODE>mode, x, gen_rtx_PARALLEL (VOIDmode, v));
1527 emit_insn (gen_rtx_SET (operands[0], x));
;; "_direct" forms: opaque unspecs that always emit the raw instruction
;; with operands in the given order, bypassing any endian adjustment.
1531 (define_insn "p8_vmrgew_<mode>_direct"
1532 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1533 (unspec:VSX_W [(match_operand:VSX_W 1 "register_operand" "v")
1534 (match_operand:VSX_W 2 "register_operand" "v")]
1535 UNSPEC_VMRGEW_DIRECT))]
1538 [(set_attr "type" "vecperm")])
1540 (define_insn "p8_vmrgow_<mode>_direct"
1541 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1542 (unspec:VSX_W [(match_operand:VSX_W 1 "register_operand" "v")
1543 (match_operand:VSX_W 2 "register_operand" "v")]
1544 UNSPEC_VMRGOW_DIRECT))]
1547 [(set_attr "type" "vecperm")])
;; Widening multiply expanders.  "Even"/"odd" in the optab names follow
;; GCC's array-element numbering, which on little-endian is the reverse of
;; the hardware's: each expander emits the vmule* instruction for BE and
;; the corresponding vmulo* instruction for LE (and vice versa for the
;; _odd_ expanders below).  Operand order is never swapped here, only the
;; even/odd instruction choice.
1549 (define_expand "vec_widen_umult_even_v16qi"
1550 [(use (match_operand:V8HI 0 "register_operand"))
1551 (use (match_operand:V16QI 1 "register_operand"))
1552 (use (match_operand:V16QI 2 "register_operand"))]
1555 if (BYTES_BIG_ENDIAN)
1556 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1558 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1562 (define_expand "vec_widen_smult_even_v16qi"
1563 [(use (match_operand:V8HI 0 "register_operand"))
1564 (use (match_operand:V16QI 1 "register_operand"))
1565 (use (match_operand:V16QI 2 "register_operand"))]
1568 if (BYTES_BIG_ENDIAN)
1569 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1571 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1575 (define_expand "vec_widen_umult_even_v8hi"
1576 [(use (match_operand:V4SI 0 "register_operand"))
1577 (use (match_operand:V8HI 1 "register_operand"))
1578 (use (match_operand:V8HI 2 "register_operand"))]
1581 if (BYTES_BIG_ENDIAN)
1582 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1584 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1588 (define_expand "vec_widen_smult_even_v8hi"
1589 [(use (match_operand:V4SI 0 "register_operand"))
1590 (use (match_operand:V8HI 1 "register_operand"))
1591 (use (match_operand:V8HI 2 "register_operand"))]
1594 if (BYTES_BIG_ENDIAN)
1595 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1597 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1601 (define_expand "vec_widen_umult_even_v4si"
1602 [(use (match_operand:V2DI 0 "register_operand"))
1603 (use (match_operand:V4SI 1 "register_operand"))
1604 (use (match_operand:V4SI 2 "register_operand"))]
1607 if (BYTES_BIG_ENDIAN)
1608 emit_insn (gen_altivec_vmuleuw (operands[0], operands[1], operands[2]));
1610 emit_insn (gen_altivec_vmulouw (operands[0], operands[1], operands[2]));
1614 (define_expand "vec_widen_umult_even_v2di"
1615 [(use (match_operand:V1TI 0 "register_operand"))
1616 (use (match_operand:V2DI 1 "register_operand"))
1617 (use (match_operand:V2DI 2 "register_operand"))]
1620 if (BYTES_BIG_ENDIAN)
1621 emit_insn (gen_altivec_vmuleud (operands[0], operands[1], operands[2]));
1623 emit_insn (gen_altivec_vmuloud (operands[0], operands[1], operands[2]));
1627 (define_expand "vec_widen_smult_even_v4si"
1628 [(use (match_operand:V2DI 0 "register_operand"))
1629 (use (match_operand:V4SI 1 "register_operand"))
1630 (use (match_operand:V4SI 2 "register_operand"))]
1633 if (BYTES_BIG_ENDIAN)
1634 emit_insn (gen_altivec_vmulesw (operands[0], operands[1], operands[2]));
1636 emit_insn (gen_altivec_vmulosw (operands[0], operands[1], operands[2]));
1640 (define_expand "vec_widen_smult_even_v2di"
1641 [(use (match_operand:V1TI 0 "register_operand"))
1642 (use (match_operand:V2DI 1 "register_operand"))
1643 (use (match_operand:V2DI 2 "register_operand"))]
1646 if (BYTES_BIG_ENDIAN)
1647 emit_insn (gen_altivec_vmulesd (operands[0], operands[1], operands[2]));
1649 emit_insn (gen_altivec_vmulosd (operands[0], operands[1], operands[2]));
;; "Odd" widening multiplies: the even/odd instruction choice is the
;; mirror image of the _even_ expanders above.
1653 (define_expand "vec_widen_umult_odd_v16qi"
1654 [(use (match_operand:V8HI 0 "register_operand"))
1655 (use (match_operand:V16QI 1 "register_operand"))
1656 (use (match_operand:V16QI 2 "register_operand"))]
1659 if (BYTES_BIG_ENDIAN)
1660 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1662 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1666 (define_expand "vec_widen_smult_odd_v16qi"
1667 [(use (match_operand:V8HI 0 "register_operand"))
1668 (use (match_operand:V16QI 1 "register_operand"))
1669 (use (match_operand:V16QI 2 "register_operand"))]
1672 if (BYTES_BIG_ENDIAN)
1673 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1675 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1679 (define_expand "vec_widen_umult_odd_v8hi"
1680 [(use (match_operand:V4SI 0 "register_operand"))
1681 (use (match_operand:V8HI 1 "register_operand"))
1682 (use (match_operand:V8HI 2 "register_operand"))]
1685 if (BYTES_BIG_ENDIAN)
1686 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1688 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1692 (define_expand "vec_widen_smult_odd_v8hi"
1693 [(use (match_operand:V4SI 0 "register_operand"))
1694 (use (match_operand:V8HI 1 "register_operand"))
1695 (use (match_operand:V8HI 2 "register_operand"))]
1698 if (BYTES_BIG_ENDIAN)
1699 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1701 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1705 (define_expand "vec_widen_umult_odd_v4si"
1706 [(use (match_operand:V2DI 0 "register_operand"))
1707 (use (match_operand:V4SI 1 "register_operand"))
1708 (use (match_operand:V4SI 2 "register_operand"))]
1711 if (BYTES_BIG_ENDIAN)
1712 emit_insn (gen_altivec_vmulouw (operands[0], operands[1], operands[2]));
1714 emit_insn (gen_altivec_vmuleuw (operands[0], operands[1], operands[2]));
1718 (define_expand "vec_widen_umult_odd_v2di"
1719 [(use (match_operand:V1TI 0 "register_operand"))
1720 (use (match_operand:V2DI 1 "register_operand"))
1721 (use (match_operand:V2DI 2 "register_operand"))]
1724 if (BYTES_BIG_ENDIAN)
1725 emit_insn (gen_altivec_vmuloud (operands[0], operands[1], operands[2]));
1727 emit_insn (gen_altivec_vmuleud (operands[0], operands[1], operands[2]));
1731 (define_expand "vec_widen_smult_odd_v4si"
1732 [(use (match_operand:V2DI 0 "register_operand"))
1733 (use (match_operand:V4SI 1 "register_operand"))
1734 (use (match_operand:V4SI 2 "register_operand"))]
1737 if (BYTES_BIG_ENDIAN)
1738 emit_insn (gen_altivec_vmulosw (operands[0], operands[1], operands[2]));
1740 emit_insn (gen_altivec_vmulesw (operands[0], operands[1], operands[2]));
1744 (define_expand "vec_widen_smult_odd_v2di"
1745 [(use (match_operand:V1TI 0 "register_operand"))
1746 (use (match_operand:V2DI 1 "register_operand"))
1747 (use (match_operand:V2DI 2 "register_operand"))]
1750 if (BYTES_BIG_ENDIAN)
1751 emit_insn (gen_altivec_vmulosd (operands[0], operands[1], operands[2]));
1753 emit_insn (gen_altivec_vmulesd (operands[0], operands[1], operands[2]));
;; Raw widening-multiply instructions, modeled as unspecs over a doubled
;; element mode (V16QI->V8HI, V8HI->V4SI, V4SI->V2DI, V2DI->V1TI).
;; Naming: vmul{e,o}{u,s}{b,h,w,d} = even/odd, unsigned/signed,
;; byte/halfword/word/doubleword.
;; NOTE(review): the UNSPEC constant and asm-template lines appear to be
;; missing from this extraction of each pattern — confirm against upstream.
1757 (define_insn "altivec_vmuleub"
1758 [(set (match_operand:V8HI 0 "register_operand" "=v")
1759 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1760 (match_operand:V16QI 2 "register_operand" "v")]
1764 [(set_attr "type" "veccomplex")])
1766 (define_insn "altivec_vmuloub"
1767 [(set (match_operand:V8HI 0 "register_operand" "=v")
1768 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1769 (match_operand:V16QI 2 "register_operand" "v")]
1773 [(set_attr "type" "veccomplex")])
1775 (define_insn "altivec_vmulesb"
1776 [(set (match_operand:V8HI 0 "register_operand" "=v")
1777 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1778 (match_operand:V16QI 2 "register_operand" "v")]
1782 [(set_attr "type" "veccomplex")])
1784 (define_insn "altivec_vmulosb"
1785 [(set (match_operand:V8HI 0 "register_operand" "=v")
1786 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1787 (match_operand:V16QI 2 "register_operand" "v")]
1791 [(set_attr "type" "veccomplex")])
1793 (define_insn "altivec_vmuleuh"
1794 [(set (match_operand:V4SI 0 "register_operand" "=v")
1795 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1796 (match_operand:V8HI 2 "register_operand" "v")]
1800 [(set_attr "type" "veccomplex")])
1802 (define_insn "altivec_vmulouh"
1803 [(set (match_operand:V4SI 0 "register_operand" "=v")
1804 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1805 (match_operand:V8HI 2 "register_operand" "v")]
1809 [(set_attr "type" "veccomplex")])
1811 (define_insn "altivec_vmulesh"
1812 [(set (match_operand:V4SI 0 "register_operand" "=v")
1813 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1814 (match_operand:V8HI 2 "register_operand" "v")]
1818 [(set_attr "type" "veccomplex")])
1820 (define_insn "altivec_vmulosh"
1821 [(set (match_operand:V4SI 0 "register_operand" "=v")
1822 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1823 (match_operand:V8HI 2 "register_operand" "v")]
1827 [(set_attr "type" "veccomplex")])
1829 (define_insn "altivec_vmuleuw"
1830 [(set (match_operand:V2DI 0 "register_operand" "=v")
1831 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1832 (match_operand:V4SI 2 "register_operand" "v")]
1836 [(set_attr "type" "veccomplex")])
1838 (define_insn "altivec_vmuleud"
1839 [(set (match_operand:V1TI 0 "register_operand" "=v")
1840 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1841 (match_operand:V2DI 2 "register_operand" "v")]
1845 [(set_attr "type" "veccomplex")])
1847 (define_insn "altivec_vmulouw"
1848 [(set (match_operand:V2DI 0 "register_operand" "=v")
1849 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1850 (match_operand:V4SI 2 "register_operand" "v")]
1854 [(set_attr "type" "veccomplex")])
1856 (define_insn "altivec_vmuloud"
1857 [(set (match_operand:V1TI 0 "register_operand" "=v")
1858 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1859 (match_operand:V2DI 2 "register_operand" "v")]
1863 [(set_attr "type" "veccomplex")])
1865 (define_insn "altivec_vmulesw"
1866 [(set (match_operand:V2DI 0 "register_operand" "=v")
1867 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1868 (match_operand:V4SI 2 "register_operand" "v")]
1872 [(set_attr "type" "veccomplex")])
1874 (define_insn "altivec_vmulesd"
1875 [(set (match_operand:V1TI 0 "register_operand" "=v")
1876 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1877 (match_operand:V2DI 2 "register_operand" "v")]
1881 [(set_attr "type" "veccomplex")])
1883 (define_insn "altivec_vmulosw"
1884 [(set (match_operand:V2DI 0 "register_operand" "=v")
1885 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1886 (match_operand:V4SI 2 "register_operand" "v")]
1890 [(set_attr "type" "veccomplex")])
1892 (define_insn "altivec_vmulosd"
1893 [(set (match_operand:V1TI 0 "register_operand" "=v")
1894 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1895 (match_operand:V2DI 2 "register_operand" "v")]
1899 [(set_attr "type" "veccomplex")])
;; Vector pack/unpack
;; All pack insns below narrow two VP vectors into one <VP_small> vector.
;; Packing concatenates element-wise left-to-right, so on little-endian the
;; two source operands are swapped in the asm output to preserve GCC's
;; element ordering.
1902 (define_insn "altivec_vpkpx"
1903 [(set (match_operand:V8HI 0 "register_operand" "=v")
1904 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
1905 (match_operand:V4SI 2 "register_operand" "v")]
1909 if (BYTES_BIG_ENDIAN)
1910 return "vpkpx %0,%1,%2";
1912 return "vpkpx %0,%2,%1";
1914 [(set_attr "type" "vecperm")])
;; Pack signed -> signed with saturation.
1916 (define_insn "altivec_vpks<VI_char>ss"
1917 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1918 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1919 (match_operand:VP 2 "register_operand" "v")]
1920 UNSPEC_VPACK_SIGN_SIGN_SAT))]
1923 if (BYTES_BIG_ENDIAN)
1924 return "vpks<VI_char>ss %0,%1,%2";
1926 return "vpks<VI_char>ss %0,%2,%1";
1928 [(set_attr "type" "vecperm")])
;; Pack signed -> unsigned with saturation.
1930 (define_insn "altivec_vpks<VI_char>us"
1931 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1932 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1933 (match_operand:VP 2 "register_operand" "v")]
1934 UNSPEC_VPACK_SIGN_UNS_SAT))]
1937 if (BYTES_BIG_ENDIAN)
1938 return "vpks<VI_char>us %0,%1,%2";
1940 return "vpks<VI_char>us %0,%2,%1";
1942 [(set_attr "type" "vecperm")])
;; Pack unsigned -> unsigned with saturation.
1944 (define_insn "altivec_vpku<VI_char>us"
1945 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1946 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1947 (match_operand:VP 2 "register_operand" "v")]
1948 UNSPEC_VPACK_UNS_UNS_SAT))]
1951 if (BYTES_BIG_ENDIAN)
1952 return "vpku<VI_char>us %0,%1,%2";
1954 return "vpku<VI_char>us %0,%2,%1";
1956 [(set_attr "type" "vecperm")])
;; Pack unsigned -> unsigned, modulo (truncating, no saturation).
1958 (define_insn "altivec_vpku<VI_char>um"
1959 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1960 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1961 (match_operand:VP 2 "register_operand" "v")]
1962 UNSPEC_VPACK_UNS_UNS_MOD))]
1965 if (BYTES_BIG_ENDIAN)
1966 return "vpku<VI_char>um %0,%1,%2";
1968 return "vpku<VI_char>um %0,%2,%1";
1970 [(set_attr "type" "vecperm")])
;; "_direct" variant kept under a distinct unspec so it is never combined
;; or endian-adjusted behind the caller's back.
1972 (define_insn "altivec_vpku<VI_char>um_direct"
1973 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1974 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1975 (match_operand:VP 2 "register_operand" "v")]
1976 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT))]
1979 if (BYTES_BIG_ENDIAN)
1980 return "vpku<VI_char>um %0,%1,%2";
1982 return "vpku<VI_char>um %0,%2,%1";
1984 [(set_attr "type" "vecperm")])
;; Element rotate-left, vrl{b,h,w,d}; per-element rotate amount in op2.
1986 (define_insn "altivec_vrl<VI_char>"
1987 [(set (match_operand:VI2 0 "register_operand" "=v")
1988 (rotate:VI2 (match_operand:VI2 1 "register_operand" "v")
1989 (match_operand:VI2 2 "register_operand" "v")))]
1991 "vrl<VI_char> %0,%1,%2"
1992 [(set_attr "type" "vecsimple")])
;; 128-bit rotate-left (vrlq).
1994 (define_insn "altivec_vrlq"
1995 [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
1996 (rotate:V1TI (match_operand:V1TI 1 "vsx_register_operand" "v")
1997 (match_operand:V1TI 2 "vsx_register_operand" "v")))]
1999 ;; rotate amount in needs to be in bits[57:63] of operand2.
2001 [(set_attr "type" "vecsimple")])
;; Rotate-left then mask-insert; operand 2 is tied to the output ("0")
;; because the instruction inserts into the existing destination value.
2003 (define_insn "altivec_vrl<VI_char>mi"
2004 [(set (match_operand:VIlong 0 "register_operand" "=v")
2005 (unspec:VIlong [(match_operand:VIlong 1 "register_operand" "v")
2006 (match_operand:VIlong 2 "register_operand" "0")
2007 (match_operand:VIlong 3 "register_operand" "v")]
2010 "vrl<VI_char>mi %0,%1,%3"
2011 [(set_attr "type" "veclogical")])
;; 128-bit rotate-mask-insert expander: the mask/shift control must sit in
;; the upper doubleword, so xxswapd repositions operand 3 first.
2013 (define_expand "altivec_vrlqmi"
2014 [(set (match_operand:V1TI 0 "vsx_register_operand")
2015 (unspec:V1TI [(match_operand:V1TI 1 "vsx_register_operand")
2016 (match_operand:V1TI 2 "vsx_register_operand")
2017 (match_operand:V1TI 3 "vsx_register_operand")]
2021 /* Mask bit begin, end fields need to be in bits [41:55] of 128-bit operand2.
2022 Shift amount in needs to be put in bits[57:63] of 128-bit operand2. */
2023 rtx tmp = gen_reg_rtx (V1TImode);
2025 emit_insn (gen_xxswapd_v1ti (tmp, operands[3]));
2026 emit_insn (gen_altivec_vrlqmi_inst (operands[0], operands[1], operands[2],
;; The actual vrlqmi instruction; expects the pre-swapped control operand.
2031 (define_insn "altivec_vrlqmi_inst"
2032 [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
2033 (unspec:V1TI [(match_operand:V1TI 1 "vsx_register_operand" "v")
2034 (match_operand:V1TI 2 "vsx_register_operand" "0")
2035 (match_operand:V1TI 3 "vsx_register_operand" "v")]
2039 [(set_attr "type" "veclogical")])
;; Rotate-left then AND with generated mask (vrlwnm/vrldnm).
2041 (define_insn "altivec_vrl<VI_char>nm"
2042 [(set (match_operand:VIlong 0 "register_operand" "=v")
2043 (unspec:VIlong [(match_operand:VIlong 1 "register_operand" "v")
2044 (match_operand:VIlong 2 "register_operand" "v")]
2047 "vrl<VI_char>nm %0,%1,%2"
2048 [(set_attr "type" "veclogical")])
;; 128-bit rotate-and-mask expander; same xxswapd repositioning as vrlqmi.
2050 (define_expand "altivec_vrlqnm"
2051 [(set (match_operand:V1TI 0 "vsx_register_operand")
2052 (unspec:V1TI [(match_operand:V1TI 1 "vsx_register_operand")
2053 (match_operand:V1TI 2 "vsx_register_operand")]
2057 /* Shift amount in needs to be put in bits[57:63] of 128-bit operand2. */
2058 rtx tmp = gen_reg_rtx (V1TImode);
2060 emit_insn (gen_xxswapd_v1ti (tmp, operands[2]));
2061 emit_insn (gen_altivec_vrlqnm_inst (operands[0], operands[1], tmp));
;; The actual vrlqnm instruction.
2065 (define_insn "altivec_vrlqnm_inst"
2066 [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
2067 (unspec:V1TI [(match_operand:V1TI 1 "vsx_register_operand" "v")
2068 (match_operand:V1TI 2 "vsx_register_operand" "v")]
2071 ;; rotate and mask bits need to be in upper 64-bits of operand2.
2073 [(set_attr "type" "veclogical")])
;; Whole-register shifts, modeled as unspecs (shift amount interpretation
;; is instruction-specific, so ordinary shift RTL cannot express them).
2075 (define_insn "altivec_vsl"
2076 [(set (match_operand:V4SI 0 "register_operand" "=v")
2077 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2078 (match_operand:V4SI 2 "register_operand" "v")]
2082 [(set_attr "type" "vecperm")])
;; Shift left by octet (vslo).
2084 (define_insn "altivec_vslo"
2085 [(set (match_operand:V4SI 0 "register_operand" "=v")
2086 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2087 (match_operand:V4SI 2 "register_operand" "v")]
2091 [(set_attr "type" "vecperm")])
;; NOTE(review): the two V16QI unspec patterns below are missing their
;; (define_insn "...") header lines in this extraction — recover the names
;; and unspec constants from the upstream altivec.md before editing.
2094 [(set (match_operand:V16QI 0 "register_operand" "=v")
2095 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
2096 (match_operand:V16QI 2 "register_operand" "v")]
2100 [(set_attr "type" "vecsimple")])
2103 [(set (match_operand:V16QI 0 "register_operand" "=v")
2104 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
2105 (match_operand:V16QI 2 "register_operand" "v")]
2109 [(set_attr "type" "vecsimple")])
;; Per-element logical shift left, vsl{b,h,w,d}.
2111 (define_insn "*altivec_vsl<VI_char>"
2112 [(set (match_operand:VI2 0 "register_operand" "=v")
2113 (ashift:VI2 (match_operand:VI2 1 "register_operand" "v")
2114 (match_operand:VI2 2 "register_operand" "v")))]
2116 "vsl<VI_char> %0,%1,%2"
2117 [(set_attr "type" "vecsimple")])
;; 128-bit shift left.
2119 (define_insn "altivec_vslq_<mode>"
2120 [(set (match_operand:VEC_TI 0 "vsx_register_operand" "=v")
2121 (ashift:VEC_TI (match_operand:VEC_TI 1 "vsx_register_operand" "v")
2122 (match_operand:VEC_TI 2 "vsx_register_operand" "v")))]
2124 /* Shift amount in needs to be in bits[57:63] of 128-bit operand. */
2126 [(set_attr "type" "vecsimple")])
;; Per-element logical shift right, vsr{b,h,w,d}.
2128 (define_insn "*altivec_vsr<VI_char>"
2129 [(set (match_operand:VI2 0 "register_operand" "=v")
2130 (lshiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
2131 (match_operand:VI2 2 "register_operand" "v")))]
2133 "vsr<VI_char> %0,%1,%2"
2134 [(set_attr "type" "vecsimple")])
;; 128-bit logical shift right.
2136 (define_insn "altivec_vsrq_<mode>"
2137 [(set (match_operand:VEC_TI 0 "vsx_register_operand" "=v")
2138 (lshiftrt:VEC_TI (match_operand:VEC_TI 1 "vsx_register_operand" "v")
2139 (match_operand:VEC_TI 2 "vsx_register_operand" "v")))]
2141 /* Shift amount in needs to be in bits[57:63] of 128-bit operand. */
2143 [(set_attr "type" "vecsimple")])
;; Per-element arithmetic shift right, vsra{b,h,w,d}.
2145 (define_insn "*altivec_vsra<VI_char>"
2146 [(set (match_operand:VI2 0 "register_operand" "=v")
2147 (ashiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
2148 (match_operand:VI2 2 "register_operand" "v")))]
2150 "vsra<VI_char> %0,%1,%2"
2151 [(set_attr "type" "vecsimple")])
;; 128-bit arithmetic shift right.
2153 (define_insn "altivec_vsraq"
2154 [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
2155 (ashiftrt:V1TI (match_operand:V1TI 1 "vsx_register_operand" "v")
2156 (match_operand:V1TI 2 "vsx_register_operand" "v")))]
2158 /* Shift amount in needs to be in bits[57:63] of 128-bit operand. */
2160 [(set_attr "type" "vecsimple")])
;; Whole-register shift right (vsr) and shift right by octet (vsro).
2162 (define_insn "altivec_vsr"
2163 [(set (match_operand:V4SI 0 "register_operand" "=v")
2164 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2165 (match_operand:V4SI 2 "register_operand" "v")]
2169 [(set_attr "type" "vecperm")])
2171 (define_insn "altivec_vsro"
2172 [(set (match_operand:V4SI 0 "register_operand" "=v")
2173 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2174 (match_operand:V4SI 2 "register_operand" "v")]
2178 [(set_attr "type" "vecperm")])
;; Optimize V2DI shifts by constants. This relies on the shift instructions
;; only looking at the bits needed to do the shift. This means we can use
;; VSPLTISW or XXSPLTIB to load up the constant, and not worry about the bits
;; that the vector shift instructions will not use.
2184 (define_mode_iterator VSHIFT_MODE [(V4SI "TARGET_P9_VECTOR")
2185 (V2DI "TARGET_P8_VECTOR")])
2187 (define_code_iterator vshift_code [ashift ashiftrt lshiftrt])
2188 (define_code_attr vshift_attr [(ashift "ashift")
2189 (ashiftrt "ashiftrt")
2190 (lshiftrt "lshiftrt")])
;; Split a shift-by-constant into (1) splat the shift count into a scratch
;; vector register and (2) the ordinary register-register vector shift.
;; The scratch may still be a SCRATCH before reload, hence the gen_reg_rtx.
2192 (define_insn_and_split "*altivec_<mode>_<vshift_attr>_const"
2193 [(set (match_operand:VSHIFT_MODE 0 "register_operand" "=v")
2194 (vshift_code:VSHIFT_MODE
2195 (match_operand:VSHIFT_MODE 1 "register_operand" "v")
2196 (match_operand:VSHIFT_MODE 2 "vector_shift_constant" "")))
2197 (clobber (match_scratch:VSHIFT_MODE 3 "=&v"))]
2198 "((<MODE>mode == V2DImode && TARGET_P8_VECTOR)
2199 || (<MODE>mode == V4SImode && TARGET_P9_VECTOR))"
2203 (unspec:VSHIFT_MODE [(match_dup 4)] UNSPEC_VECTOR_SHIFT))
2205 (vshift_code:VSHIFT_MODE (match_dup 1)
2208 if (GET_CODE (operands[3]) == SCRATCH)
2209 operands[3] = gen_reg_rtx (<MODE>mode);
;; operand 2 may be a CONST_VECTOR or a vec_duplicate-style rtx; extract
;; the scalar shift count either way.
2211 operands[4] = GET_CODE (operands[2]) == CONST_VECTOR
2212 ? CONST_VECTOR_ELT (operands[2], 0)
2213 : XEXP (operands[2], 0);
;; Materialize the splatted shift count: vspltisw covers 0..15, otherwise
;; xxspltib on Power9.  Only the low bits matter to the shift instructions.
2216 (define_insn "*altivec_<mode>_shift_const"
2217 [(set (match_operand:VSHIFT_MODE 0 "register_operand" "=v")
2218 (unspec:VSHIFT_MODE [(match_operand 1 "const_int_operand" "n")]
2219 UNSPEC_VECTOR_SHIFT))]
2222 if (UINTVAL (operands[1]) <= 15)
2223 return "vspltisw %0,%1";
2224 else if (TARGET_P9_VECTOR)
2225 return "xxspltib %x0,%1";
;; Sum-across operations.  These saturate, so each pattern also sets VSCR
;; (the saturation status register) via UNSPEC_SET_VSCR.
2230 (define_insn "altivec_vsum4ubs"
2231 [(set (match_operand:V4SI 0 "register_operand" "=v")
2232 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")
2233 (match_operand:V4SI 2 "register_operand" "v")]
2235 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2238 [(set_attr "type" "veccomplex")])
;; vsum4sbs/vsum4shs over the VIshort (V8HI/V16QI) iterator.
2240 (define_insn "altivec_vsum4s<VI_char>s"
2241 [(set (match_operand:V4SI 0 "register_operand" "=v")
2242 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
2243 (match_operand:V4SI 2 "register_operand" "v")]
2245 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2247 "vsum4s<VI_char>s %0,%1,%2"
2248 [(set_attr "type" "veccomplex")])
;; vsum2sws reads its addends from fixed (BE-numbered) word positions, so
;; on little-endian operand 2 is rotated into place with vsldoi before the
;; raw instruction, and the result rotated back afterwards.
2250 (define_expand "altivec_vsum2sws"
2251 [(use (match_operand:V4SI 0 "register_operand"))
2252 (use (match_operand:V4SI 1 "register_operand"))
2253 (use (match_operand:V4SI 2 "register_operand"))]
2256 if (BYTES_BIG_ENDIAN)
2257 emit_insn (gen_altivec_vsum2sws_direct (operands[0], operands[1],
2261 rtx tmp1 = gen_reg_rtx (V4SImode);
2262 rtx tmp2 = gen_reg_rtx (V4SImode);
2263 emit_insn (gen_altivec_vsldoi_v4si (tmp1, operands[2],
2264 operands[2], GEN_INT (12)));
2265 emit_insn (gen_altivec_vsum2sws_direct (tmp2, operands[1], tmp1));
2266 emit_insn (gen_altivec_vsldoi_v4si (operands[0], tmp2, tmp2,
2272 ; FIXME: This can probably be expressed without an UNSPEC.
2273 (define_insn "altivec_vsum2sws_direct"
2274 [(set (match_operand:V4SI 0 "register_operand" "=v")
2275 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2276 (match_operand:V4SI 2 "register_operand" "v")]
2278 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2281 [(set_attr "type" "veccomplex")])
;; vsumsws: LE fixup splats the addend word (vspltw) before the raw
;; instruction, then rotates the result into position with vsldoi.
2283 (define_expand "altivec_vsumsws"
2284 [(use (match_operand:V4SI 0 "register_operand"))
2285 (use (match_operand:V4SI 1 "register_operand"))
2286 (use (match_operand:V4SI 2 "register_operand"))]
2289 if (BYTES_BIG_ENDIAN)
2290 emit_insn (gen_altivec_vsumsws_direct (operands[0], operands[1],
2294 rtx tmp1 = gen_reg_rtx (V4SImode);
2295 rtx tmp2 = gen_reg_rtx (V4SImode);
2296 emit_insn (gen_altivec_vspltw_direct (tmp1, operands[2], const0_rtx));
2297 emit_insn (gen_altivec_vsumsws_direct (tmp2, operands[1], tmp1));
2298 emit_insn (gen_altivec_vsldoi_v4si (operands[0], tmp2, tmp2,
2304 ; FIXME: This can probably be expressed without an UNSPEC.
2305 (define_insn "altivec_vsumsws_direct"
2306 [(set (match_operand:V4SI 0 "register_operand" "=v")
2307 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2308 (match_operand:V4SI 2 "register_operand" "v")]
2309 UNSPEC_VSUMSWS_DIRECT))
2310 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2313 [(set_attr "type" "veccomplex")])
;; Element splat (vsplt*).  Each size has three patterns:
;;   - an expander building canonical vec_duplicate-of-vec_select RTL,
;;   - an "*internal" insn that adjusts the lane number for little
;;     endian (hardware numbers elements big-endian, so lane i becomes
;;     N-1-i on LE),
;;   - a "_direct" UNSPEC insn that emits the raw instruction with the
;;     lane number used verbatim (for callers that pre-swapped).

;; Splat byte element op2 (0..15) of op1 across all 16 bytes.
2315 (define_expand "altivec_vspltb"
2316 [(use (match_operand:V16QI 0 "register_operand"))
2317 (use (match_operand:V16QI 1 "register_operand"))
2318 (use (match_operand:QI 2 "const_0_to_15_operand"))]
2321 rtvec v = gen_rtvec (1, operands[2]);
2323 x = gen_rtx_VEC_SELECT (QImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2324 x = gen_rtx_VEC_DUPLICATE (V16QImode, x);
2325 emit_insn (gen_rtx_SET (operands[0], x));
;; LE remaps lane i -> 15 - i before printing vspltb.
2329 (define_insn "*altivec_vspltb_internal"
2330 [(set (match_operand:V16QI 0 "register_operand" "=v")
2331 (vec_duplicate:V16QI
2332 (vec_select:QI (match_operand:V16QI 1 "register_operand" "v")
2334 [(match_operand:QI 2 "const_0_to_15_operand" "")]))))]
2337 if (!BYTES_BIG_ENDIAN)
2338 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
2340 return "vspltb %0,%1,%2";
2342 [(set_attr "type" "vecperm")])
;; Raw vspltb, lane number used as-is (no endian adjustment).
2344 (define_insn "altivec_vspltb_direct"
2345 [(set (match_operand:V16QI 0 "register_operand" "=v")
2346 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
2347 (match_operand:QI 2 "const_0_to_15_operand" "i")]
2348 UNSPEC_VSPLT_DIRECT))]
2351 [(set_attr "type" "vecperm")])
;; Halfword splat, lane 0..7.
2353 (define_expand "altivec_vsplth"
2354 [(use (match_operand:V8HI 0 "register_operand"))
2355 (use (match_operand:V8HI 1 "register_operand"))
2356 (use (match_operand:QI 2 "const_0_to_7_operand"))]
2359 rtvec v = gen_rtvec (1, operands[2]);
2361 x = gen_rtx_VEC_SELECT (HImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2362 x = gen_rtx_VEC_DUPLICATE (V8HImode, x);
2363 emit_insn (gen_rtx_SET (operands[0], x));
;; LE remaps lane i -> 7 - i before printing vsplth.
2367 (define_insn "*altivec_vsplth_internal"
2368 [(set (match_operand:V8HI 0 "register_operand" "=v")
2370 (vec_select:HI (match_operand:V8HI 1 "register_operand" "v")
2372 [(match_operand:QI 2 "const_0_to_7_operand" "")]))))]
2375 if (!BYTES_BIG_ENDIAN)
2376 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
2378 return "vsplth %0,%1,%2";
2380 [(set_attr "type" "vecperm")])
;; Raw vsplth, lane number used as-is.
2382 (define_insn "altivec_vsplth_direct"
2383 [(set (match_operand:V8HI 0 "register_operand" "=v")
2384 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
2385 (match_operand:QI 2 "const_0_to_7_operand" "i")]
2386 UNSPEC_VSPLT_DIRECT))]
2389 [(set_attr "type" "vecperm")])
;; Word splat, lane 0..3.
2391 (define_expand "altivec_vspltw"
2392 [(use (match_operand:V4SI 0 "register_operand"))
2393 (use (match_operand:V4SI 1 "register_operand"))
2394 (use (match_operand:QI 2 "const_0_to_3_operand"))]
2397 rtvec v = gen_rtvec (1, operands[2]);
2399 x = gen_rtx_VEC_SELECT (SImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2400 x = gen_rtx_VEC_DUPLICATE (V4SImode, x);
2401 emit_insn (gen_rtx_SET (operands[0], x));
;; LE remaps lane i -> 3 - i before printing vspltw.
2405 (define_insn "*altivec_vspltw_internal"
2406 [(set (match_operand:V4SI 0 "register_operand" "=v")
2408 (vec_select:SI (match_operand:V4SI 1 "register_operand" "v")
2410 [(match_operand:QI 2 "const_0_to_3_operand" "i")]))))]
2413 if (!BYTES_BIG_ENDIAN)
2414 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2416 return "vspltw %0,%1,%2";
2418 [(set_attr "type" "vecperm")])
;; Raw vspltw, lane number used as-is.
2420 (define_insn "altivec_vspltw_direct"
2421 [(set (match_operand:V4SI 0 "register_operand" "=v")
2422 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2423 (match_operand:QI 2 "const_0_to_3_operand" "i")]
2424 UNSPEC_VSPLT_DIRECT))]
2427 [(set_attr "type" "vecperm")])
;; Float splat: same lane logic as vspltw, vector is V4SF.  The emitted
;; instruction is still vspltw (bit pattern copy, no FP semantics).
2429 (define_expand "altivec_vspltsf"
2430 [(use (match_operand:V4SF 0 "register_operand"))
2431 (use (match_operand:V4SF 1 "register_operand"))
2432 (use (match_operand:QI 2 "const_0_to_3_operand"))]
2435 rtvec v = gen_rtvec (1, operands[2]);
2437 x = gen_rtx_VEC_SELECT (SFmode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2438 x = gen_rtx_VEC_DUPLICATE (V4SFmode, x);
2439 emit_insn (gen_rtx_SET (operands[0], x));
2443 (define_insn "*altivec_vspltsf_internal"
2444 [(set (match_operand:V4SF 0 "register_operand" "=v")
2446 (vec_select:SF (match_operand:V4SF 1 "register_operand" "v")
2448 [(match_operand:QI 2 "const_0_to_3_operand" "i")]))))]
2449 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2451 if (!BYTES_BIG_ENDIAN)
2452 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2454 return "vspltw %0,%1,%2";
2456 [(set_attr "type" "vecperm")])
;; Splat a 5-bit signed immediate (-16..15) into every element.
2458 (define_insn "altivec_vspltis<VI_char>"
2459 [(set (match_operand:VI 0 "register_operand" "=v")
2461 (match_operand:QI 1 "s5bit_cint_operand" "i")))]
2463 "vspltis<VI_char> %0,%1"
2464 [(set_attr "type" "vecperm")])
;; vrfiz: round each V4SF element toward zero (truncate), modeled as
;; fix:V4SF (float -> integral value kept in float format).
2466 (define_insn "*altivec_vrfiz"
2467 [(set (match_operand:V4SF 0 "register_operand" "=v")
2468 (fix:V4SF (match_operand:V4SF 1 "register_operand" "v")))]
2469 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2471 [(set_attr "type" "vecfloat")])
;; vperm: select 16 bytes from the 32-byte concatenation of op1/op2
;; under control of the byte indices in op3.  Expanders route little
;; endian through altivec_expand_vec_perm_le, which rewrites the
;; selector so the BE-numbered hardware picks the right bytes.

2473 (define_expand "altivec_vperm_<mode>"
2474 [(set (match_operand:VM 0 "register_operand")
2475 (unspec:VM [(match_operand:VM 1 "register_operand")
2476 (match_operand:VM 2 "register_operand")
2477 (match_operand:V16QI 3 "register_operand")]
2481 if (!BYTES_BIG_ENDIAN)
2483 altivec_expand_vec_perm_le (operands);
;; Direct insn.  Two alternatives: Power9 xxperm (VSX regs, op2 tied to
;; op0 hence "0") and classic vperm ("v" regs).  vperm is slightly
;; preferred because its destination need not overlap a source.
2488 ;; Slightly prefer vperm, since the target does not overlap the source
2489 (define_insn "altivec_vperm_<mode>_direct"
2490 [(set (match_operand:VM 0 "register_operand" "=?wa,v")
2491 (unspec:VM [(match_operand:VM 1 "register_operand" "wa,v")
2492 (match_operand:VM 2 "register_operand" "0,v")
2493 (match_operand:V16QI 3 "register_operand" "wa,v")]
2499 [(set_attr "type" "vecperm")
2500 (set_attr "isa" "p9v,*")])
;; Mixed-mode variant: V8HI inputs producing a V16QI result.
2502 (define_insn "altivec_vperm_v8hiv16qi"
2503 [(set (match_operand:V16QI 0 "register_operand" "=?wa,v")
2504 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "wa,v")
2505 (match_operand:V8HI 2 "register_operand" "0,v")
2506 (match_operand:V16QI 3 "register_operand" "wa,v")]
2512 [(set_attr "type" "vecperm")
2513 (set_attr "isa" "p9v,*")])
;; Unsigned flavor -- distinct UNSPEC so CSE keeps them apart; the
;; generated instruction is the same permute.
2515 (define_expand "altivec_vperm_<mode>_uns"
2516 [(set (match_operand:VM 0 "register_operand")
2517 (unspec:VM [(match_operand:VM 1 "register_operand")
2518 (match_operand:VM 2 "register_operand")
2519 (match_operand:V16QI 3 "register_operand")]
2523 if (!BYTES_BIG_ENDIAN)
2525 altivec_expand_vec_perm_le (operands);
2530 (define_insn "*altivec_vperm_<mode>_uns_internal"
2531 [(set (match_operand:VM 0 "register_operand" "=?wa,v")
2532 (unspec:VM [(match_operand:VM 1 "register_operand" "wa,v")
2533 (match_operand:VM 2 "register_operand" "0,v")
2534 (match_operand:V16QI 3 "register_operand" "wa,v")]
2540 [(set_attr "type" "vecperm")
2541 (set_attr "isa" "p9v,*")])
;; Standard-named byte permute entry point for the middle end.
2543 (define_expand "vec_permv16qi"
2544 [(set (match_operand:V16QI 0 "register_operand")
2545 (unspec:V16QI [(match_operand:V16QI 1 "register_operand")
2546 (match_operand:V16QI 2 "register_operand")
2547 (match_operand:V16QI 3 "register_operand")]
2551 if (!BYTES_BIG_ENDIAN) {
2552 altivec_expand_vec_perm_le (operands);
;; Power9 vpermr/xxpermr: permute with complemented selector (LE form).
2557 (define_insn "*altivec_vpermr_<mode>_internal"
2558 [(set (match_operand:VM 0 "register_operand" "=?wa,v")
2559 (unspec:VM [(match_operand:VM 1 "register_operand" "wa,v")
2560 (match_operand:VM 2 "register_operand" "0,v")
2561 (match_operand:V16QI 3 "register_operand" "wa,v")]
2567 [(set_attr "type" "vecperm")
2568 (set_attr "isa" "p9v,*")])
;; V4SF rounding, int<->float conversion, and estimate instructions.
;; The convert insns take a QI immediate scale factor (value is divided
;; or multiplied by 2^op2); the vctuxs/vctsxs forms saturate and so
;; also set VSCR.

;; vrfip: round toward +infinity (ceil).
2570 (define_insn "altivec_vrfip" ; ceil
2571 [(set (match_operand:V4SF 0 "register_operand" "=v")
2572 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2576 [(set_attr "type" "vecfloat")])
;; vrfin: round to nearest.
2578 (define_insn "altivec_vrfin"
2579 [(set (match_operand:V4SF 0 "register_operand" "=v")
2580 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2584 [(set_attr "type" "vecfloat")])
;; vrfim: round toward -infinity (floor).
2586 (define_insn "*altivec_vrfim" ; floor
2587 [(set (match_operand:V4SF 0 "register_operand" "=v")
2588 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2592 [(set_attr "type" "vecfloat")])
;; vcfux: unsigned int words -> float, scaled by 2^-op2.
2594 (define_insn "altivec_vcfux"
2595 [(set (match_operand:V4SF 0 "register_operand" "=v")
2596 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2597 (match_operand:QI 2 "immediate_operand" "i")]
2601 [(set_attr "type" "vecfloat")])
;; vcfsx: signed int words -> float, scaled by 2^-op2.
2603 (define_insn "altivec_vcfsx"
2604 [(set (match_operand:V4SF 0 "register_operand" "=v")
2605 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2606 (match_operand:QI 2 "immediate_operand" "i")]
2610 [(set_attr "type" "vecfloat")])
;; vctuxs: float -> unsigned int words with saturation (sets VSCR).
2612 (define_insn "altivec_vctuxs"
2613 [(set (match_operand:V4SI 0 "register_operand" "=v")
2614 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2615 (match_operand:QI 2 "immediate_operand" "i")]
2617 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2620 [(set_attr "type" "vecfloat")])
;; vctsxs: float -> signed int words with saturation (sets VSCR).
2622 (define_insn "altivec_vctsxs"
2623 [(set (match_operand:V4SI 0 "register_operand" "=v")
2624 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2625 (match_operand:QI 2 "immediate_operand" "i")]
2627 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2630 [(set_attr "type" "vecfloat")])
;; vlogefp: log2 estimate of each element.
2632 (define_insn "altivec_vlogefp"
2633 [(set (match_operand:V4SF 0 "register_operand" "=v")
2634 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2638 [(set_attr "type" "vecfloat")])
;; vexptefp: 2^x estimate of each element.
2640 (define_insn "altivec_vexptefp"
2641 [(set (match_operand:V4SF 0 "register_operand" "=v")
2642 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2646 [(set_attr "type" "vecfloat")])
;; vrsqrtefp: reciprocal square-root estimate.
2648 (define_insn "*altivec_vrsqrtefp"
2649 [(set (match_operand:V4SF 0 "register_operand" "=v")
2650 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2652 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2654 [(set_attr "type" "vecfloat")])
;; vrefp: reciprocal estimate.
2656 (define_insn "altivec_vrefp"
2657 [(set (match_operand:V4SF 0 "register_operand" "=v")
2658 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2660 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2662 [(set_attr "type" "vecfloat")])
;; copysign for V4SF: build a vector of sign-bit masks (0x80000000 in
;; each word) and vector-select between op1 (magnitude) and op2 (sign)
;; under that mask, giving |op1| with op2's sign per element.
2664 (define_expand "altivec_copysign_v4sf3"
2665 [(use (match_operand:V4SF 0 "register_operand"))
2666 (use (match_operand:V4SF 1 "register_operand"))
2667 (use (match_operand:V4SF 2 "register_operand"))]
2668 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2670 rtx mask = gen_reg_rtx (V4SImode);
2671 rtx mask_val = gen_int_mode (HOST_WIDE_INT_1U << 31, SImode);
2672 rtvec v = gen_rtvec (4, mask_val, mask_val, mask_val, mask_val);
2674 emit_insn (gen_vec_initv4sisi (mask, gen_rtx_PARALLEL (V4SImode, v)));
2675 emit_insn (gen_vector_select_v4sf (operands[0], operands[1], operands[2],
2676 gen_lowpart (V4SFmode, mask)));
;; vsldoi: shift the 32-byte concatenation of op1:op2 left by the
;; immediate byte count op3 and take the high 16 bytes.  Workhorse for
;; the little-endian fixups elsewhere in this file.
2680 (define_insn "altivec_vsldoi_<mode>"
2681 [(set (match_operand:VM 0 "register_operand" "=v")
2682 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
2683 (match_operand:VM 2 "register_operand" "v")
2684 (match_operand:QI 3 "immediate_operand" "i")]
2687 "vsldoi %0,%1,%2,%3"
2688 [(set_attr "type" "vecperm")])
;; Sign-extending unpack.  "hi"/"lo" name the element-order-dependent
;; half, so on little endian the opposite hardware instruction is
;; emitted; the "_direct" patterns always emit the named instruction.

2690 (define_insn "altivec_vupkhs<VU_char>"
2691 [(set (match_operand:VP 0 "register_operand" "=v")
2692 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2693 UNSPEC_VUNPACK_HI_SIGN))]
2696 if (BYTES_BIG_ENDIAN)
2697 return "vupkhs<VU_char> %0,%1";
2699 return "vupkls<VU_char> %0,%1";
2701 [(set_attr "type" "vecperm")])
2703 (define_insn "altivec_vupkhs<VU_char>_direct"
2704 [(set (match_operand:VP 0 "register_operand" "=v")
2705 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2706 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2708 "vupkhs<VU_char> %0,%1"
2709 [(set_attr "type" "vecperm")])
2711 (define_insn "altivec_vupkls<VU_char>"
2712 [(set (match_operand:VP 0 "register_operand" "=v")
2713 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2714 UNSPEC_VUNPACK_LO_SIGN))]
2717 if (BYTES_BIG_ENDIAN)
2718 return "vupkls<VU_char> %0,%1";
2720 return "vupkhs<VU_char> %0,%1";
2722 [(set_attr "type" "vecperm")])
2724 (define_insn "*altivec_vupkls<VU_char>_direct"
2725 [(set (match_operand:VP 0 "register_operand" "=v")
2726 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2727 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
2729 "vupkls<VU_char> %0,%1"
2730 [(set_attr "type" "vecperm")])
;; Pixel unpack (1/5/5/5 -> 8/8/8/8), same hi/lo endian swap.
2732 (define_insn "altivec_vupkhpx"
2733 [(set (match_operand:V4SI 0 "register_operand" "=v")
2734 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2738 if (BYTES_BIG_ENDIAN)
2739 return "vupkhpx %0,%1";
2741 return "vupklpx %0,%1";
2743 [(set_attr "type" "vecperm")])
2745 (define_insn "altivec_vupklpx"
2746 [(set (match_operand:V4SI 0 "register_operand" "=v")
2747 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2751 if (BYTES_BIG_ENDIAN)
2752 return "vupklpx %0,%1";
2754 return "vupkhpx %0,%1";
2756 [(set_attr "type" "vecperm")])
;; Vector equality compare-and-branch on V16QI.  Forces memory operands
;; into registers (via direct-move permute on pre-P9 LE to avoid a
;; doubleword swap, which equality does not need), then hands the
;; comparison to rs6000_emit_cbranch.
2758 /* The cbranch_optab doesn't allow FAIL, so old cpus which are
2759 inefficient on unaligned vsx are disabled as the cost is high
2760 for unaligned load/store. */
2761 (define_expand "cbranchv16qi4"
2762 [(use (match_operator 0 "equality_operator"
2763 [(match_operand:V16QI 1 "reg_or_mem_operand")
2764 (match_operand:V16QI 2 "reg_or_mem_operand")]))
2765 (use (match_operand 3))]
2766 "VECTOR_MEM_VSX_P (V16QImode)
2767 && TARGET_EFFICIENT_UNALIGNED_VSX"
2769 /* Use direct move for P8 LE to skip doubleword swap, as the byte
2770 order doesn't matter for equality compare. If any operands are
2771 altivec indexed or indirect operands, the load can be implemented
2772 directly by altivec aligned load instruction and swap is no
2774 if (!TARGET_P9_VECTOR
2775 && !BYTES_BIG_ENDIAN
2776 && MEM_P (operands[1])
2777 && !altivec_indexed_or_indirect_operand (operands[1], V16QImode)
2778 && MEM_P (operands[2])
2779 && !altivec_indexed_or_indirect_operand (operands[2], V16QImode))
2781 rtx reg_op1 = gen_reg_rtx (V16QImode);
2782 rtx reg_op2 = gen_reg_rtx (V16QImode);
2783 rs6000_emit_le_vsx_permute (reg_op1, operands[1], V16QImode);
2784 rs6000_emit_le_vsx_permute (reg_op2, operands[2], V16QImode);
2785 operands[1] = reg_op1;
2786 operands[2] = reg_op2;
2790 operands[1] = force_reg (V16QImode, operands[1]);
2791 operands[2] = force_reg (V16QImode, operands[2]);
2794 rtx_code code = GET_CODE (operands[0]);
2795 operands[0] = gen_rtx_fmt_ee (code, V16QImode, operands[1], operands[2]);
2796 rs6000_emit_cbranch (V16QImode, operands);
;; "Dot-form" integer vector compares: produce the element-wise compare
;; result in op0 AND summarize it into CR6 (all-true / all-false bits),
;; letting vec_all_*/vec_any_* builtins branch on one instruction.
2800 ;; Compare vectors producing a vector result and a predicate, setting CR6 to
2801 ;; indicate a combined status
2802 (define_insn "altivec_vcmpequ<VI_char>_p"
2803 [(set (reg:CC CR6_REGNO)
2804 (unspec:CC [(eq:CC (match_operand:VI2 1 "register_operand" "v")
2805 (match_operand:VI2 2 "register_operand" "v"))]
2807 (set (match_operand:VI2 0 "register_operand" "=v")
2808 (eq:VI2 (match_dup 1)
2811 "vcmpequ<VI_char>. %0,%1,%2"
2812 [(set_attr "type" "veccmpfx")])
;; 128-bit (quadword) equality, ISA 3.1.
2814 (define_insn "altivec_vcmpequt_p"
2815 [(set (reg:CC CR6_REGNO)
2816 (unspec:CC [(eq:CC (match_operand:V1TI 1 "altivec_register_operand" "v")
2817 (match_operand:V1TI 2 "altivec_register_operand" "v"))]
2819 (set (match_operand:V1TI 0 "altivec_register_operand" "=v")
2820 (eq:V1TI (match_dup 1)
2823 "vcmpequq. %0,%1,%2"
2824 [(set_attr "type" "veccmpfx")])
;; vcmpne builtin: no native not-equal, so compare-equal into a scratch
;; (op3, allocated in the preparation stmt) then complement it.
2826 ;; Expand for builtin vcmpne{b,h,w}
2827 (define_expand "altivec_vcmpne_<mode>"
2828 [(set (match_operand:VSX_EXTRACT_I 3 "altivec_register_operand" "=v")
2829 (eq:VSX_EXTRACT_I (match_operand:VSX_EXTRACT_I 1 "altivec_register_operand" "v")
2830 (match_operand:VSX_EXTRACT_I 2 "altivec_register_operand" "v")))
2831 (set (match_operand:VSX_EXTRACT_I 0 "altivec_register_operand" "=v")
2832 (not:VSX_EXTRACT_I (match_dup 3)))]
2835 operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
;; Signed greater-than, element sizes via VI2; quadword variant below.
2838 (define_insn "*altivec_vcmpgts<VI_char>_p"
2839 [(set (reg:CC CR6_REGNO)
2840 (unspec:CC [(gt:CC (match_operand:VI2 1 "register_operand" "v")
2841 (match_operand:VI2 2 "register_operand" "v"))]
2843 (set (match_operand:VI2 0 "register_operand" "=v")
2844 (gt:VI2 (match_dup 1)
2847 "vcmpgts<VI_char>. %0,%1,%2"
2848 [(set_attr "type" "veccmpfx")])
2850 (define_insn "*altivec_vcmpgtst_p"
2851 [(set (reg:CC CR6_REGNO)
2852 (unspec:CC [(gt:CC (match_operand:V1TI 1 "register_operand" "v")
2853 (match_operand:V1TI 2 "register_operand" "v"))]
2855 (set (match_operand:V1TI 0 "register_operand" "=v")
2856 (gt:V1TI (match_dup 1)
2859 "vcmpgtsq. %0,%1,%2"
2860 [(set_attr "type" "veccmpfx")])
;; Unsigned greater-than, plus quadword variant.
2862 (define_insn "*altivec_vcmpgtu<VI_char>_p"
2863 [(set (reg:CC CR6_REGNO)
2864 (unspec:CC [(gtu:CC (match_operand:VI2 1 "register_operand" "v")
2865 (match_operand:VI2 2 "register_operand" "v"))]
2867 (set (match_operand:VI2 0 "register_operand" "=v")
2868 (gtu:VI2 (match_dup 1)
2871 "vcmpgtu<VI_char>. %0,%1,%2"
2872 [(set_attr "type" "veccmpfx")])
2874 (define_insn "*altivec_vcmpgtut_p"
2875 [(set (reg:CC CR6_REGNO)
2876 (unspec:CC [(gtu:CC (match_operand:V1TI 1 "register_operand" "v")
2877 (match_operand:V1TI 2 "register_operand" "v"))]
2879 (set (match_operand:V1TI 0 "register_operand" "=v")
2880 (gtu:V1TI (match_dup 1)
2883 "vcmpgtuq. %0,%1,%2"
2884 [(set_attr "type" "veccmpfx")])
;; Dot-form V4SF compares: vector result in op0, CR6 summary in
;; parallel, same scheme as the integer predicates above.
2886 (define_insn "*altivec_vcmpeqfp_p"
2887 [(set (reg:CC CR6_REGNO)
2888 (unspec:CC [(eq:CC (match_operand:V4SF 1 "register_operand" "v")
2889 (match_operand:V4SF 2 "register_operand" "v"))]
2891 (set (match_operand:V4SF 0 "register_operand" "=v")
2892 (eq:V4SF (match_dup 1)
2894 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2895 "vcmpeqfp. %0,%1,%2"
2896 [(set_attr "type" "veccmp")])
2898 (define_insn "*altivec_vcmpgtfp_p"
2899 [(set (reg:CC CR6_REGNO)
2900 (unspec:CC [(gt:CC (match_operand:V4SF 1 "register_operand" "v")
2901 (match_operand:V4SF 2 "register_operand" "v"))]
2903 (set (match_operand:V4SF 0 "register_operand" "=v")
2904 (gt:V4SF (match_dup 1)
2906 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2907 "vcmpgtfp. %0,%1,%2"
2908 [(set_attr "type" "veccmp")])
2910 (define_insn "*altivec_vcmpgefp_p"
2911 [(set (reg:CC CR6_REGNO)
2912 (unspec:CC [(ge:CC (match_operand:V4SF 1 "register_operand" "v")
2913 (match_operand:V4SF 2 "register_operand" "v"))]
2915 (set (match_operand:V4SF 0 "register_operand" "=v")
2916 (ge:V4SF (match_dup 1)
2918 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2919 "vcmpgefp. %0,%1,%2"
2920 [(set_attr "type" "veccmp")])
;; vcmpbfp.: bounds compare (|op1| <= op2 per element); result encoding
;; differs from the eq/gt forms, hence the plain two-operand UNSPEC.
2922 (define_insn "altivec_vcmpbfp_p"
2923 [(set (reg:CC CR6_REGNO)
2924 (unspec:CC [(match_operand:V4SF 1 "register_operand" "v")
2925 (match_operand:V4SF 2 "register_operand" "v")]
2927 (set (match_operand:V4SF 0 "register_operand" "=v")
2928 (unspec:V4SF [(match_dup 1)
2931 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)"
2933 [(set_attr "type" "veccmp")])
;; VSCR access and the data-stream touch instructions.  All are
;; unspec_volatile (or bare unspec with a Pmode base check): they have
;; side effects the optimizers must not reorder or delete.

;; Move op0's low word into the vector status/control register.
2935 (define_insn "altivec_mtvscr"
2936 [(set (reg:SI VSCR_REGNO)
2938 [(match_operand:V4SI 0 "register_operand" "v")] UNSPECV_MTVSCR))]
2941 [(set_attr "type" "vecsimple")])
;; Read VSCR into a vector register.
2943 (define_insn "altivec_mfvscr"
2944 [(set (match_operand:V8HI 0 "register_operand" "=v")
2945 (unspec_volatile:V8HI [(reg:SI VSCR_REGNO)] UNSPECV_MFVSCR))]
2948 [(set_attr "type" "vecsimple")])
;; Stop all data streams.
2950 (define_insn "altivec_dssall"
2951 [(unspec_volatile [(const_int 0)] UNSPECV_DSSALL)]
2954 [(set_attr "type" "vecsimple")])
;; Stop the data stream numbered by op0.
2956 (define_insn "altivec_dss"
2957 [(unspec_volatile [(match_operand:QI 0 "immediate_operand" "i")]
2961 [(set_attr "type" "vecsimple")])
;; dst/dstt/dstst/dststt: software prefetch hints (address base op0,
;; control word op1, stream id op2).  op0 must be a Pmode register.
2963 (define_insn "altivec_dst"
2964 [(unspec [(match_operand 0 "register_operand" "b")
2965 (match_operand:SI 1 "register_operand" "r")
2966 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DST)]
2967 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2969 [(set_attr "type" "vecsimple")])
2971 (define_insn "altivec_dstt"
2972 [(unspec [(match_operand 0 "register_operand" "b")
2973 (match_operand:SI 1 "register_operand" "r")
2974 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTT)]
2975 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2977 [(set_attr "type" "vecsimple")])
2979 (define_insn "altivec_dstst"
2980 [(unspec [(match_operand 0 "register_operand" "b")
2981 (match_operand:SI 1 "register_operand" "r")
2982 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTST)]
2983 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2985 [(set_attr "type" "vecsimple")])
2987 (define_insn "altivec_dststt"
2988 [(unspec [(match_operand 0 "register_operand" "b")
2989 (match_operand:SI 1 "register_operand" "r")
2990 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTSTT)]
2991 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2993 [(set_attr "type" "vecsimple")])
;; lvsl/lvsr: build the alignment-permute control vector from a memory
;; address.  On big endian the hardware result is used directly; on
;; little endian the expanders recompute the BE-semantics mask by
;; permuting the raw mask with itself under a 0..15 series selector.

2995 (define_expand "altivec_lvsl"
2996 [(use (match_operand:V16QI 0 "register_operand"))
2997 (use (match_operand:V16QI 1 "memory_operand"))]
3000 if (BYTES_BIG_ENDIAN)
3001 emit_insn (gen_altivec_lvsl_direct (operands[0], operands[1]))
3004 rtx mask, constv, vperm;
3005 mask = gen_reg_rtx (V16QImode);
3006 emit_insn (gen_altivec_lvsl_direct (mask, operands[1]));
3007 constv = gen_const_vec_series (V16QImode, const0_rtx, const1_rtx);
3008 constv = force_reg (V16QImode, constv);
3009 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
3011 emit_insn (gen_rtx_SET (operands[0], vperm));
;; lvsl from a bare GPR address (no MEM wrapper).
3016 (define_insn "altivec_lvsl_reg_<mode>"
3017 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
3019 [(match_operand:GPR 1 "gpc_reg_operand" "b")]
3023 [(set_attr "type" "vecload")])
;; Raw lvsl on a memory operand.
3025 (define_insn "altivec_lvsl_direct"
3026 [(set (match_operand:V16QI 0 "register_operand" "=v")
3027 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
3031 [(set_attr "type" "vecload")])
;; lvsr: mirror of lvsl for right-shift alignment masks.
3033 (define_expand "altivec_lvsr"
3034 [(use (match_operand:V16QI 0 "altivec_register_operand"))
3035 (use (match_operand:V16QI 1 "memory_operand"))]
3038 if (BYTES_BIG_ENDIAN)
3039 emit_insn (gen_altivec_lvsr_direct (operands[0], operands[1]));
3042 rtx mask, constv, vperm;
3043 mask = gen_reg_rtx (V16QImode);
3044 emit_insn (gen_altivec_lvsr_direct (mask, operands[1]));
3045 constv = gen_const_vec_series (V16QImode, const0_rtx, const1_rtx);
3046 constv = force_reg (V16QImode, constv);
3047 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
3049 emit_insn (gen_rtx_SET (operands[0], vperm));
3054 (define_insn "altivec_lvsr_reg_<mode>"
3055 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
3057 [(match_operand:GPR 1 "gpc_reg_operand" "b")]
3061 [(set_attr "type" "vecload")])
3063 (define_insn "altivec_lvsr_direct"
3064 [(set (match_operand:V16QI 0 "register_operand" "=v")
3065 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
3069 [(set_attr "type" "vecload")])
;; Build the realignment mask the vectorizer uses for misaligned loads:
;; lvsr on the NEGATED address yields the permute vector that shifts a
;; pair of aligned loads into the desired unaligned value.
3071 (define_expand "build_vector_mask_for_load"
3072 [(set (match_operand:V16QI 0 "register_operand")
3073 (unspec:V16QI [(match_operand 1 "memory_operand")] UNSPEC_LVSR))]
3079 gcc_assert (MEM_P (operands[1]));
3081 addr = XEXP (operands[1], 0);
3082 temp = gen_reg_rtx (GET_MODE (addr));
3083 emit_insn (gen_rtx_SET (temp, gen_rtx_NEG (GET_MODE (addr), addr)));
3084 emit_insn (gen_altivec_lvsr (operands[0],
3085 replace_equiv_address (operands[1], temp)));
;; Element loads and LRU-hinted load.  The dummy UNSPEC in each
;; parallel distinguishes patterns whose RTL would otherwise be
;; identical to a plain vector load.
3089 ;; Parallel some of the LVE* and STV*'s with unspecs because some have
3090 ;; identical rtl but different instructions-- and gcc gets confused.
;; lve{b,h,w}x: load one element into the naturally-addressed lane.
3092 (define_insn "altivec_lve<VI_char>x"
3094 [(set (match_operand:VI 0 "register_operand" "=v")
3095 (match_operand:VI 1 "memory_operand" "Z"))
3096 (unspec [(const_int 0)] UNSPEC_LVE)])]
3098 "lve<VI_char>x %0,%y1"
3099 [(set_attr "type" "vecload")])
;; Float flavor of the element load (emits lvewx).
3101 (define_insn "*altivec_lvesfx"
3103 [(set (match_operand:V4SF 0 "register_operand" "=v")
3104 (match_operand:V4SF 1 "memory_operand" "Z"))
3105 (unspec [(const_int 0)] UNSPEC_LVE)])]
3108 [(set_attr "type" "vecload")])
;; lvxl: vector load marking the cache line least-recently-used.
3110 (define_insn "altivec_lvxl_<mode>"
3112 [(set (match_operand:VM2 0 "register_operand" "=v")
3113 (match_operand:VM2 1 "memory_operand" "Z"))
3114 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
3117 [(set_attr "type" "vecload")])
3119 ; This version of lvx is used only in cases where we need to force an lvx
3120 ; over any other load, and we don't care about losing CSE opportunities.
3121 ; Its primary use is for prologue register saves.
3122 (define_insn "altivec_lvx_<mode>_internal"
3124 [(set (match_operand:VM2 0 "register_operand" "=v")
3125 (match_operand:VM2 1 "memory_operand" "Z"))
3126 (unspec [(const_int 0)] UNSPEC_LVX)])]
3129 [(set_attr "type" "vecload")])
;; Canonical lvx: split the address into reg+reg (2op) or single-reg
;; (1op) form, picking the DI or SI pattern by pointer width.  The
;; matched insns below model lvx's implicit 16-byte address truncation
;; with an AND of the address.
3131 ; The following patterns embody what lvx should usually look like.
3132 (define_expand "altivec_lvx_<VM2:mode>"
3133 [(set (match_operand:VM2 0 "register_operand")
3134 (match_operand:VM2 1 "altivec_indexed_or_indirect_operand"))]
3137 rtx addr = XEXP (operand1, 0);
3138 if (rs6000_sum_of_two_registers_p (addr))
3140 rtx op1 = XEXP (addr, 0);
3141 rtx op2 = XEXP (addr, 1);
3143 emit_insn (gen_altivec_lvx_<VM2:mode>_2op_di (operand0, op1, op2));
3145 emit_insn (gen_altivec_lvx_<VM2:mode>_2op_si (operand0, op1, op2));
3150 emit_insn (gen_altivec_lvx_<VM2:mode>_1op_di (operand0, addr));
3152 emit_insn (gen_altivec_lvx_<VM2:mode>_1op_si (operand0, addr));
3157 ; The next two patterns embody what lvx should usually look like.
3158 (define_insn "altivec_lvx_<VM2:mode>_2op_<P:mptrsize>"
3159 [(set (match_operand:VM2 0 "register_operand" "=v")
3160 (mem:VM2 (and:P (plus:P (match_operand:P 1 "register_operand" "b")
3161 (match_operand:P 2 "register_operand" "r"))
3165 [(set_attr "type" "vecload")])
3167 (define_insn "altivec_lvx_<VM2:mode>_1op_<P:mptrsize>"
3168 [(set (match_operand:VM2 0 "register_operand" "=v")
3169 (mem:VM2 (and:P (match_operand:P 1 "register_operand" "r")
3173 [(set_attr "type" "vecload")])
3175 ; This version of stvx is used only in cases where we need to force an stvx
3176 ; over any other store, and we don't care about losing CSE opportunities.
3177 ; Its primary use is for epilogue register restores.
3178 (define_insn "altivec_stvx_<mode>_internal"
3180 [(set (match_operand:VM2 0 "memory_operand" "=Z")
3181 (match_operand:VM2 1 "register_operand" "v"))
3182 (unspec [(const_int 0)] UNSPEC_STVX)])]
3185 [(set_attr "type" "vecstore")])
;; Canonical stvx: store-side mirror of the lvx expander above (note
;; operand 1 is the MEM and operand 0 the source register).
3187 ; The following patterns embody what stvx should usually look like.
3188 (define_expand "altivec_stvx_<VM2:mode>"
3189 [(set (match_operand:VM2 1 "altivec_indexed_or_indirect_operand")
3190 (match_operand:VM2 0 "register_operand"))]
3193 rtx addr = XEXP (operand1, 0);
3194 if (rs6000_sum_of_two_registers_p (addr))
3196 rtx op1 = XEXP (addr, 0);
3197 rtx op2 = XEXP (addr, 1);
3199 emit_insn (gen_altivec_stvx_<VM2:mode>_2op_di (operand0, op1, op2));
3201 emit_insn (gen_altivec_stvx_<VM2:mode>_2op_si (operand0, op1, op2));
3206 emit_insn (gen_altivec_stvx_<VM2:mode>_1op_di (operand0, addr));
3208 emit_insn (gen_altivec_stvx_<VM2:mode>_1op_si (operand0, addr));
3213 ; The next two patterns embody what stvx should usually look like.
3214 (define_insn "altivec_stvx_<VM2:mode>_2op_<P:mptrsize>"
3215 [(set (mem:VM2 (and:P (plus:P (match_operand:P 1 "register_operand" "b")
3216 (match_operand:P 2 "register_operand" "r"))
3218 (match_operand:VM2 0 "register_operand" "v"))]
3221 [(set_attr "type" "vecstore")])
3223 (define_insn "altivec_stvx_<VM2:mode>_1op_<P:mptrsize>"
3224 [(set (mem:VM2 (and:P (match_operand:P 1 "register_operand" "r")
3226 (match_operand:VM2 0 "register_operand" "v"))]
3229 [(set_attr "type" "vecstore")])
;; stvxl: vector store marking the cache line least-recently-used.
3231 (define_insn "altivec_stvxl_<mode>"
3233 [(set (match_operand:VM2 0 "memory_operand" "=Z")
3234 (match_operand:VM2 1 "register_operand" "v"))
3235 (unspec [(const_int 0)] UNSPEC_STVXL)])]
3238 [(set_attr "type" "vecstore")])
;; stve{b,h,w}x: store one element of op1 to the scalar memory slot.
3240 (define_insn "altivec_stve<VI_char>x"
3241 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
3242 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
3244 "stve<VI_char>x %1,%y0"
3245 [(set_attr "type" "vecstore")])
;; Float flavor of the element store (emits stvewx).
3247 (define_insn "*altivec_stvesfx"
3248 [(set (match_operand:SF 0 "memory_operand" "=Z")
3249 (unspec:SF [(match_operand:V4SF 1 "register_operand" "v")] UNSPEC_STVE))]
3252 [(set_attr "type" "vecstore")])
;; doublee: widen the "even" words (BE words 0 and 2) of a V4SI/V4SF to
;; V2DF.  xvcvsxwdp/xvcvspdp read BE word positions, so big endian
;; converts directly while little endian first rotates the input one
;; word with xxsldwi to move words 2 and 0 into place.
3255 ;; signed int/float to double convert words 0 and 2
3256 (define_expand "doublee<mode>2"
3257 [(set (match_operand:V2DF 0 "register_operand" "=v")
3258 (match_operand:VSX_W 1 "register_operand" "v"))]
3261 machine_mode op_mode = GET_MODE (operands[1]);
3263 if (BYTES_BIG_ENDIAN)
3265 /* Big endian word numbering for words in operand is 0 1 2 3.
3266 Input words 0 and 2 are where they need to be. */
3267 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], operands[1]));
3271 /* Little endian word numbering for operand is 3 2 1 0.
3272 take (operand[1] operand[1]) and shift left one word
3273 3 2 1 0 3 2 1 0 => 2 1 0 3
3274 Input words 2 and 0 are now where they need to be for the
3277 rtx rtx_val = GEN_INT (1);
3279 rtx_tmp = gen_reg_rtx (op_mode);
3280 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3281 operands[1], rtx_val));
3282 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3286 [(set_attr "type" "veccomplex")])
;; unsdoublee: unsigned variant of doublee -- widen BE words 0 and 2 of
;; a V4SI to V2DF via xvcvuxwdp, with the same LE one-word rotate.
3288 ;; Generate unsdoublee
3289 ;; unsigned int to double convert words 0 and 2
3290 (define_expand "unsdoubleev4si2"
3291 [(set (match_operand:V2DF 0 "register_operand" "=v")
3292 (match_operand:V4SI 1 "register_operand" "v"))]
3295 if (BYTES_BIG_ENDIAN)
3297 /* Big endian word numbering for words in operand is 0 1 2 3.
3298 Input words 0 and 2 are where they need to be. */
3299 emit_insn (gen_vsx_xvcvuxwdp (operands[0], operands[1]));
3303 /* Little endian word numbering for operand is 3 2 1 0.
3304 take (operand[1] operand[1]) and shift left one word
3305 3 2 1 0 3 2 1 0 => 2 1 0 3
3306 Input words 2 and 0 are now where they need to be for the
3309 rtx rtx_val = GEN_INT (1);
3311 rtx_tmp = gen_reg_rtx (V4SImode);
3312 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3313 operands[1], rtx_val));
3314 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3318 [(set_attr "type" "veccomplex")])
;; doubleo: widen the "odd" words (BE words 1 and 3) to V2DF.  The
;; mirror of doublee: here big endian needs the one-word rotate and
;; little endian converts directly.
3320 ;; Generate doubleov
3321 ;; signed int/float to double convert words 1 and 3
3322 (define_expand "doubleo<mode>2"
3323 [(set (match_operand:V2DF 0 "register_operand" "=v")
3324 (match_operand:VSX_W 1 "register_operand" "v"))]
3327 machine_mode op_mode = GET_MODE (operands[1]);
3329 if (BYTES_BIG_ENDIAN)
3331 /* Big endian word numbering for words in operand is 0 1 2 3.
3332 take (operand[1] operand[1]) and shift left one word
3333 0 1 2 3 0 1 2 3 => 1 2 3 0
3334 Input words 1 and 3 are now where they need to be for the
3337 rtx rtx_val = GEN_INT (1);
3339 rtx_tmp = gen_reg_rtx (op_mode);
3340 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3341 operands[1], rtx_val));
3342 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3346 /* Little endian word numbering for operand is 3 2 1 0.
3347 Input words 3 and 1 are where they need to be. */
3348 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], operands[1]));
3352 [(set_attr "type" "veccomplex")])
;; unsdoubleo: unsigned variant of doubleo -- widen BE words 1 and 3 of
;; a V4SI to V2DF via xvcvuxwdp.
3354 ;; Generate unsdoubleov
3355 ;; unsigned int to double convert words 1 and 3
3356 (define_expand "unsdoubleov4si2"
3357 [(set (match_operand:V2DF 0 "register_operand" "=v")
3358 (match_operand:V4SI 1 "register_operand" "v"))]
3361 if (BYTES_BIG_ENDIAN)
3363 /* Big endian word numbering for words in operand is 0 1 2 3.
3364 take (operand[1] operand[1]) and shift left one word
3365 0 1 2 3 0 1 2 3 => 1 2 3 0
3366 Input words 1 and 3 are now where they need to be for the
3369 rtx rtx_val = GEN_INT (1);
3371 rtx_tmp = gen_reg_rtx (V4SImode);
3372 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3373 operands[1], rtx_val));
3374 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3378 /* Want to convert the words 1 and 3.
3379 Little endian word numbering for operand is 3 2 1 0.
3380 Input words 3 and 1 are where they need to be. */
3381 emit_insn (gen_vsx_xvcvuxwdp (operands[0], operands[1]));
3385 [(set_attr "type" "veccomplex")])
3387 ;; Generate doublehv
3388 ;; signed int/float to double convert words 0 and 1
;; Both endian branches build rtx_tmp with two xxsldwi rotations before the
;; convert; only the shift counts and operand order differ.
3389 (define_expand "doubleh<mode>2"
3390 [(set (match_operand:V2DF 0 "register_operand" "=v")
3391 (match_operand:VSX_W 1 "register_operand" "v"))]
3397 machine_mode op_mode = GET_MODE (operands[1]);
3398 rtx_tmp = gen_reg_rtx (op_mode);
3400 if (BYTES_BIG_ENDIAN)
3402 /* Big endian word numbering for words in operand is 0 1 2 3.
3403 Shift operand left one word, rtx_tmp word order is now 1 2 3 0.
3404 take (rtx_tmp operand[1]) and shift left three words
3405 1 2 3 0 0 1 2 3 => 0 0 1 2
3406 Input words 0 and 1 are now where they need to be for the
3408 rtx_val = GEN_INT (1);
3409 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3410 operands[1], rtx_val));
3412 rtx_val = GEN_INT (3);
3413 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, rtx_tmp,
3414 operands[1], rtx_val));
3415 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3419 /* Little endian word numbering for operand is 3 2 1 0.
3420 Shift operand left three words, rtx_tmp word order is now 0 3 2 1.
3421 take (operand[1] rtx_tmp) and shift left two words
3422 3 2 1 0 0 3 2 1 => 1 0 0 3
3423 Input words 0 and 1 are now where they need to be for the
3425 rtx_val = GEN_INT (3);
3426 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3427 operands[1], rtx_val));
3429 rtx_val = GEN_INT (2);
3430 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3432 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3436 [(set_attr "type" "veccomplex")])
3438 ;; Generate unsdoublehv
3439 ;; unsigned int to double convert words 0 and 1
;; Unsigned variant of doublehv: two xxsldwi rotations position words 0/1,
;; then xvcvuxwdp converts them.  rtx_val's initial value is overwritten
;; in both branches below.
3440 (define_expand "unsdoublehv4si2"
3441 [(set (match_operand:V2DF 0 "register_operand" "=v")
3442 (match_operand:V4SI 1 "register_operand" "v"))]
3445 rtx rtx_tmp = gen_reg_rtx (V4SImode);
3446 rtx rtx_val = GEN_INT (12);
3448 if (BYTES_BIG_ENDIAN)
3450 /* Big endian word numbering for words in operand is 0 1 2 3.
3451 Shift operand left one word, rtx_tmp word order is now 1 2 3 0.
3452 take (rtx_tmp operand[1]) and shift left three words
3453 1 2 3 0 0 1 2 3 => 0 0 1 2
3454 Input words 0 and 1 are now where they need to be for the
3456 rtx_val = GEN_INT (1);
3457 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3458 operands[1], rtx_val));
3460 rtx_val = GEN_INT (3);
3461 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, rtx_tmp,
3462 operands[1], rtx_val));
3463 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3467 /* Little endian word numbering for operand is 3 2 1 0.
3468 Shift operand left three words, rtx_tmp word order is now 0 3 2 1.
3469 take (operand[1] rtx_tmp) and shift left two words
3470 3 2 1 0 0 3 2 1 => 1 0 0 3
3471 Input words 1 and 0 are now where they need to be for the
3473 rtx_val = GEN_INT (3);
3475 rtx_tmp = gen_reg_rtx (V4SImode);
3476 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3477 operands[1], rtx_val));
3479 rtx_val = GEN_INT (2);
3480 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3482 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3486 [(set_attr "type" "veccomplex")])
3488 ;; Generate doublelv
3489 ;; signed int/float to double convert words 2 and 3
;; Mirror of doublehv: two xxsldwi rotations position words 2/3 for the
;; xvcv*dp convert; shift counts differ per endianness.
3490 (define_expand "doublel<mode>2"
3491 [(set (match_operand:V2DF 0 "register_operand" "=v")
3492 (match_operand:VSX_W 1 "register_operand" "v"))]
3496 rtx rtx_val = GEN_INT (3);
3498 machine_mode op_mode = GET_MODE (operands[1]);
3499 rtx_tmp = gen_reg_rtx (op_mode);
3501 if (BYTES_BIG_ENDIAN)
3503 /* Big endian word numbering for operand is 0 1 2 3.
3504 Shift operand left three words, rtx_tmp word order is now 3 0 1 2.
3505 take (operand[1] rtx_tmp) and shift left two words
3506 0 1 2 3 3 0 1 2 => 2 3 3 0
3507 now use convert instruction to convert word 2 and 3 in the
3509 rtx_val = GEN_INT (3);
3510 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3511 operands[1], rtx_val));
3513 rtx_val = GEN_INT (2);
3514 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3516 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3520 /* Little endian word numbering for operand is 3 2 1 0.
3521 Shift operand left one word, rtx_tmp word order is now 2 1 0 3.
3522 take (rtx_tmp operand[1]) and shift left three words
3523 2 1 0 3 3 2 1 0 => 3 3 2 1
3524 now use convert instruction to convert word 3 and 2 in the
3526 rtx_val = GEN_INT (1);
3527 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3528 operands[1], rtx_val));
3530 rtx_val = GEN_INT (3);
3531 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, rtx_tmp,
3532 operands[1], rtx_val));
3533 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3537 [(set_attr "type" "veccomplex")])
3539 ;; Generate unsdoublelv
3540 ;; unsigned int to double convert words 2 and 3
;; Unsigned variant of doublelv; converts with xvcvuxwdp after the same
;; pair of xxsldwi rotations.
3541 (define_expand "unsdoublelv4si2"
3542 [(set (match_operand:V2DF 0 "register_operand" "=v")
3543 (match_operand:V4SI 1 "register_operand" "v"))]
3546 rtx rtx_tmp = gen_reg_rtx (V4SImode);
3547 rtx rtx_val = GEN_INT (12);
3549 if (BYTES_BIG_ENDIAN)
3551 /* Big endian word numbering for operand is 0 1 2 3.
3552 Shift operand left three words, rtx_tmp word order is now 3 0 1 2.
3553 take (operand[1] rtx_tmp) and shift left two words
3554 0 1 2 3 3 0 1 2 => 2 3 3 0
3555 now use convert instruction to convert word 2 and 3 in the
3557 rtx_val = GEN_INT (3);
3558 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3559 operands[1], rtx_val));
3561 rtx_val = GEN_INT (2);
3562 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3564 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3568 /* Little endian word numbering for operand is 3 2 1 0.
3569 Shift operand left one word, rtx_tmp word order is now 2 1 0 3.
3570 take (rtx_tmp operand[1]) and shift left three words
3571 2 1 0 3 3 2 1 0 => 3 3 2 1
3572 now use convert instruction to convert word 3 and 2 in the
3574 rtx_val = GEN_INT (1);
3575 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp,
3576 operands[1], operands[1], rtx_val));
3578 rtx_val = GEN_INT (3);
3579 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, rtx_tmp,
3580 operands[1], rtx_val));
3581 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3585 [(set_attr "type" "veccomplex")])
3587 ;; Generate two vector F32 converted to packed vector I16 vector
;; Convert each V4SF input with vctuxs (float -> unsigned word, saturating)
;; and pack the two V4SI results into one V8HI with vpkswss.
3588 (define_expand "convert_4f32_8i16"
3589 [(set (match_operand:V8HI 0 "register_operand" "=v")
3590 (unspec:V8HI [(match_operand:V4SF 1 "register_operand" "v")
3591 (match_operand:V4SF 2 "register_operand" "v")]
3592 UNSPEC_CONVERT_4F32_8I16))]
3595 rtx rtx_tmp_hi = gen_reg_rtx (V4SImode);
3596 rtx rtx_tmp_lo = gen_reg_rtx (V4SImode);
3598 emit_insn (gen_altivec_vctuxs (rtx_tmp_hi, operands[1], const0_rtx));
3599 emit_insn (gen_altivec_vctuxs (rtx_tmp_lo, operands[2], const0_rtx));
3600 emit_insn (gen_altivec_vpkswss (operands[0], rtx_tmp_hi, rtx_tmp_lo));
3605 ;; Convert two vector F32 to packed vector F16.
3606 ;; This builtin packs 32-bit floating-point values into a packed
3607 ;; 16-bit floating point values (stored in 16bit integer type).
3608 ;; (vector unsigned short r = vec_pack_to_short_fp32 (a, b);
3609 ;; The expected codegen for this builtin is
3612 ;; if (little endian)
;; Each input is narrowed with xvcvsphp; the two halves are then packed
;; with vpkuwum, swapping the operand order on little endian so the
;; result element order matches big endian.
3617 (define_expand "convert_4f32_8f16"
3618 [(set (match_operand:V8HI 0 "register_operand" "=v")
3619 (unspec:V8HI [(match_operand:V4SF 1 "register_operand" "v")
3620 (match_operand:V4SF 2 "register_operand" "v")]
3621 UNSPEC_CONVERT_4F32_8F16))]
3624 rtx rtx_tmp_hi = gen_reg_rtx (V4SImode);
3625 rtx rtx_tmp_lo = gen_reg_rtx (V4SImode);
3627 emit_insn (gen_vsx_xvcvsphp (rtx_tmp_hi, operands[1]));
3628 emit_insn (gen_vsx_xvcvsphp (rtx_tmp_lo, operands[2]));
3629 if (!BYTES_BIG_ENDIAN)
3630 emit_insn (gen_altivec_vpkuwum (operands[0], rtx_tmp_hi, rtx_tmp_lo));
3632 emit_insn (gen_altivec_vpkuwum (operands[0], rtx_tmp_lo, rtx_tmp_hi));
3638 ;; xxlxor/vxor SCRATCH0,SCRATCH0,SCRATCH0
3639 ;; vsubu?m SCRATCH2,SCRATCH1,%1
3640 ;; vmaxs? %0,%1,SCRATCH2"
;; abs(x) = smax (x, 0 - x): operand 2 is a scratch reg, operand 3 is the
;; zero constant, operand 4 holds the negated value.
3641 (define_expand "abs<mode>2"
3642 [(set (match_dup 2) (match_dup 3))
3644 (minus:VI2 (match_dup 2)
3645 (match_operand:VI2 1 "register_operand" "v")))
3646 (set (match_operand:VI2 0 "register_operand" "=v")
3647 (smax:VI2 (match_dup 1) (match_dup 4)))]
3650 operands[2] = gen_reg_rtx (<MODE>mode);
3651 operands[3] = CONST0_RTX (<MODE>mode);
3652 operands[4] = gen_reg_rtx (<MODE>mode);
3656 ;; vspltisw SCRATCH1,0
3657 ;; vsubu?m SCRATCH2,SCRATCH1,%1
3658 ;; vmins? %0,%1,SCRATCH2"
;; nabs(x) = smin (x, 0 - x); mirrors abs<mode>2 with smin instead of smax.
3659 (define_expand "nabs<mode>2"
3660 [(set (match_dup 2) (match_dup 3))
3662 (minus:VI2 (match_dup 2)
3663 (match_operand:VI2 1 "register_operand" "v")))
3664 (set (match_operand:VI2 0 "register_operand" "=v")
3665 (smin:VI2 (match_dup 1) (match_dup 4)))]
3668 operands[2] = gen_reg_rtx (<MODE>mode);
3669 operands[3] = CONST0_RTX (<MODE>mode);
3670 operands[4] = gen_reg_rtx (<MODE>mode);
3674 ;; vspltisw SCRATCH1,-1
3675 ;; vslw SCRATCH2,SCRATCH1,SCRATCH1
3676 ;; vandc %0,%1,SCRATCH2
;; Float abs: splat -1, shift it by itself to build a per-word sign-bit
;; mask, then andc clears the sign bits of operand 1.
3677 (define_expand "altivec_absv4sf2"
3679 (vec_duplicate:V4SI (const_int -1)))
3681 (ashift:V4SI (match_dup 2) (match_dup 2)))
3682 (set (match_operand:V4SF 0 "register_operand" "=v")
3683 (and:V4SF (not:V4SF (subreg:V4SF (match_dup 3) 0))
3684 (match_operand:V4SF 1 "register_operand" "v")))]
3687 operands[2] = gen_reg_rtx (V4SImode);
3688 operands[3] = gen_reg_rtx (V4SImode);
3692 ;; vspltis? SCRATCH0,0
3693 ;; vsubs?s SCRATCH2,SCRATCH1,%1
3694 ;; vmaxs? %0,%1,SCRATCH2"
;; Saturating abs: 0 -s x (ss_minus, which also updates VSCR) then smax.
3695 (define_expand "altivec_abss_<mode>"
3696 [(set (match_dup 2) (vec_duplicate:VI (const_int 0)))
3697 (parallel [(set (match_dup 3)
3698 (ss_minus:VI (match_dup 2)
3699 (match_operand:VI 1 "register_operand" "v")))
3700 (set (reg:SI VSCR_REGNO)
3701 (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))])
3702 (set (match_operand:VI 0 "register_operand" "=v")
3703 (smax:VI (match_dup 1) (match_dup 3)))]
3706 operands[2] = gen_reg_rtx (GET_MODE (operands[0]));
3707 operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
;; Horizontal add of a short-element vector: partial sums with vsum4s,
;; then vsumsws folds them into one word; extract the word holding the
;; result (last element on BE, element 0 on LE).
3710 (define_expand "reduc_plus_scal_<mode>"
3711 [(set (match_operand:<VI_scalar> 0 "register_operand" "=v")
3712 (unspec:VIshort [(match_operand:VIshort 1 "register_operand" "v")]
3713 UNSPEC_REDUC_PLUS))]
3716 rtx vzero = gen_reg_rtx (V4SImode);
3717 rtx vtmp1 = gen_reg_rtx (V4SImode);
3718 rtx vtmp2 = gen_reg_rtx (<MODE>mode);
3719 rtx dest = gen_lowpart (V4SImode, vtmp2);
3720 int elt = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (<MODE>mode) - 1 : 0;
3722 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
3723 emit_insn (gen_altivec_vsum4s<VI_char>s (vtmp1, operands[1], vzero));
3724 emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
3725 rs6000_expand_vector_extract (operands[0], vtmp2, GEN_INT (elt));
;; Native vector negate (vnegw/vnegd) for ISA 3.0 (Power9).
3729 (define_insn "*p9_neg<mode>2"
3730 [(set (match_operand:VNEG 0 "altivec_register_operand" "=v")
3731 (neg:VNEG (match_operand:VNEG 1 "altivec_register_operand" "v")))]
3733 "vneg<VI_char> %0,%1"
3734 [(set_attr "type" "vecsimple")])
;; Vector negate expander; when no native vneg applies (pre-P9 or a mode
;; other than V4SI/V2DI) the value is computed as 0 - x.
;; NOTE(review): some lines of this expand are elided in this view.
3736 (define_expand "neg<mode>2"
3737 [(set (match_operand:VI2 0 "register_operand")
3738 (neg:VI2 (match_operand:VI2 1 "register_operand")))]
3741 if (!TARGET_P9_VECTOR || (<MODE>mode != V4SImode && <MODE>mode != V2DImode))
3745 vzero = gen_reg_rtx (GET_MODE (operands[0]));
3746 emit_move_insn (vzero, CONST0_RTX (<MODE>mode));
3747 emit_insn (gen_sub<mode>3 (operands[0], vzero, operands[1]));
;; Unsigned dot product accumulate: vmsumu?m multiplies the short elements
;; and adds the products into the V4SI accumulator (operand 3).
3752 (define_expand "udot_prod<mode>"
3753 [(set (match_operand:V4SI 0 "register_operand" "=v")
3754 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
3755 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
3756 (match_operand:VIshort 2 "register_operand" "v")]
3760 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], operands[2], operands[3]));
;; Signed halfword dot product accumulate via vmsumshm.
3764 (define_expand "sdot_prodv8hi"
3765 [(set (match_operand:V4SI 0 "register_operand" "=v")
3766 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
3767 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3768 (match_operand:V8HI 2 "register_operand" "v")]
3772 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], operands[2], operands[3]));
;; Widening unsigned sum: multiply-sum against a splatted vector of ones,
;; accumulating into operand 2.
3776 (define_expand "widen_usum<mode>3"
3777 [(set (match_operand:V4SI 0 "register_operand" "=v")
3778 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
3779 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")]
3783 rtx vones = gen_reg_rtx (GET_MODE (operands[1]));
3785 emit_insn (gen_altivec_vspltis<VI_char> (vones, const1_rtx));
3786 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], vones, operands[2]));
;; Widening signed byte sum: vmsummbm against a vector of ones.
3790 (define_expand "widen_ssumv16qi3"
3791 [(set (match_operand:V4SI 0 "register_operand" "=v")
3792 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
3793 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")]
3797 rtx vones = gen_reg_rtx (V16QImode);
3799 emit_insn (gen_altivec_vspltisb (vones, const1_rtx));
3800 emit_insn (gen_altivec_vmsummbm (operands[0], operands[1], vones, operands[2]));
;; Widening signed halfword sum: vmsumshm against a vector of ones.
3804 (define_expand "widen_ssumv8hi3"
3805 [(set (match_operand:V4SI 0 "register_operand" "=v")
3806 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
3807 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
3811 rtx vones = gen_reg_rtx (V8HImode);
3813 emit_insn (gen_altivec_vspltish (vones, const1_rtx));
3814 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], vones, operands[2]));
;; Signed unpack of the high elements, expanded straight to the
;; endian-independent _DIRECT pattern.
3818 (define_expand "vec_unpacks_hi_<VP_small_lc>"
3819 [(set (match_operand:VP 0 "register_operand" "=v")
3820 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3821 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
;; Signed unpack of the low elements, expanded straight to the
;; endian-independent _DIRECT pattern.
3825 (define_expand "vec_unpacks_lo_<VP_small_lc>"
3826 [(set (match_operand:VP 0 "register_operand" "=v")
3827 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3828 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
;; Permute producing V4SI from V8HI input; alternative 0 (wa regs, p9v)
;; ties operand 2 to the output ("0" constraint), alternative 1 uses
;; classic AltiVec registers.
3832 (define_insn "vperm_v8hiv4si"
3833 [(set (match_operand:V4SI 0 "register_operand" "=?wa,v")
3834 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "wa,v")
3835 (match_operand:V4SI 2 "register_operand" "0,v")
3836 (match_operand:V16QI 3 "register_operand" "wa,v")]
3842 [(set_attr "type" "vecperm")
3843 (set_attr "isa" "p9v,*")])
;; Permute producing V8HI from V16QI input; same two-alternative scheme
;; as vperm_v8hiv4si (p9v wa regs vs. AltiVec v regs).
3845 (define_insn "vperm_v16qiv8hi"
3846 [(set (match_operand:V8HI 0 "register_operand" "=?wa,v")
3847 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "wa,v")
3848 (match_operand:V8HI 2 "register_operand" "0,v")
3849 (match_operand:V16QI 3 "register_operand" "wa,v")]
3855 [(set_attr "type" "vecperm")
3856 (set_attr "isa" "p9v,*")])
;; Unsigned (zero-extending) unpack of the high elements: merge with a
;; zero vector; the merge direction and operand order flip with endianness.
3858 (define_expand "vec_unpacku_hi_<VP_small_lc>"
3859 [(set (match_operand:VP 0 "register_operand" "=v")
3860 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3864 rtx vzero = gen_reg_rtx (<VP_small>mode);
3865 emit_insn (gen_altivec_vspltis<VU_char> (vzero, const0_rtx));
3867 rtx res = gen_reg_rtx (<VP_small>mode);
3868 rtx op1 = operands[1];
3870 if (BYTES_BIG_ENDIAN)
3871 emit_insn (gen_altivec_vmrgh<VU_char> (res, vzero, op1));
3873 emit_insn (gen_altivec_vmrgl<VU_char> (res, op1, vzero));
3875 emit_insn (gen_move_insn (operands[0], gen_lowpart (<MODE>mode, res)));
;; Unsigned (zero-extending) unpack of the low elements: merge with a
;; zero vector, mirroring vec_unpacku_hi with the opposite merge.
3879 (define_expand "vec_unpacku_lo_<VP_small_lc>"
3880 [(set (match_operand:VP 0 "register_operand" "=v")
3881 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3885 rtx vzero = gen_reg_rtx (<VP_small>mode);
3886 emit_insn (gen_altivec_vspltis<VU_char> (vzero, const0_rtx));
3888 rtx res = gen_reg_rtx (<VP_small>mode);
3889 rtx op1 = operands[1];
3891 if (BYTES_BIG_ENDIAN)
3892 emit_insn (gen_altivec_vmrgl<VU_char> (res, vzero, op1));
3894 emit_insn (gen_altivec_vmrgh<VU_char> (res, op1, vzero));
3896 emit_insn (gen_move_insn (operands[0], gen_lowpart (<MODE>mode, res)));
;; Widening unsigned multiply, high half: even/odd byte multiplies merged
;; together.  On LE the even/odd instruction roles and the merge direction
;; swap because element numbering is reversed.
3900 (define_expand "vec_widen_umult_hi_v16qi"
3901 [(set (match_operand:V8HI 0 "register_operand" "=v")
3902 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3903 (match_operand:V16QI 2 "register_operand" "v")]
3907 rtx ve = gen_reg_rtx (V8HImode);
3908 rtx vo = gen_reg_rtx (V8HImode);
3910 if (BYTES_BIG_ENDIAN)
3912 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3913 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3914 emit_insn (gen_altivec_vmrghh (operands[0], ve, vo));
3918 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3919 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3920 emit_insn (gen_altivec_vmrglh (operands[0], ve, vo));
;; Widening unsigned multiply, low half; same even/odd scheme with the
;; opposite merge of vec_widen_umult_hi_v16qi.
3925 (define_expand "vec_widen_umult_lo_v16qi"
3926 [(set (match_operand:V8HI 0 "register_operand" "=v")
3927 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3928 (match_operand:V16QI 2 "register_operand" "v")]
3932 rtx ve = gen_reg_rtx (V8HImode);
3933 rtx vo = gen_reg_rtx (V8HImode);
3935 if (BYTES_BIG_ENDIAN)
3937 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3938 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3939 emit_insn (gen_altivec_vmrglh (operands[0], ve, vo));
3943 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3944 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3945 emit_insn (gen_altivec_vmrghh (operands[0], ve, vo));
;; Widening signed multiply, high half: signed even/odd byte multiplies
;; merged; instruction roles swap on LE.
3950 (define_expand "vec_widen_smult_hi_v16qi"
3951 [(set (match_operand:V8HI 0 "register_operand" "=v")
3952 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3953 (match_operand:V16QI 2 "register_operand" "v")]
3957 rtx ve = gen_reg_rtx (V8HImode);
3958 rtx vo = gen_reg_rtx (V8HImode);
3960 if (BYTES_BIG_ENDIAN)
3962 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3963 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3964 emit_insn (gen_altivec_vmrghh (operands[0], ve, vo));
3968 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3969 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3970 emit_insn (gen_altivec_vmrglh (operands[0], ve, vo));
;; Widening signed multiply, low half; opposite merge of the _hi variant.
3975 (define_expand "vec_widen_smult_lo_v16qi"
3976 [(set (match_operand:V8HI 0 "register_operand" "=v")
3977 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3978 (match_operand:V16QI 2 "register_operand" "v")]
3982 rtx ve = gen_reg_rtx (V8HImode);
3983 rtx vo = gen_reg_rtx (V8HImode);
3985 if (BYTES_BIG_ENDIAN)
3987 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3988 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3989 emit_insn (gen_altivec_vmrglh (operands[0], ve, vo));
3993 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3994 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3995 emit_insn (gen_altivec_vmrghh (operands[0], ve, vo));
;; Widening unsigned halfword multiply, high half: even/odd halfword
;; multiplies merged as words; roles swap on LE.
4000 (define_expand "vec_widen_umult_hi_v8hi"
4001 [(set (match_operand:V4SI 0 "register_operand" "=v")
4002 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
4003 (match_operand:V8HI 2 "register_operand" "v")]
4007 rtx ve = gen_reg_rtx (V4SImode);
4008 rtx vo = gen_reg_rtx (V4SImode);
4010 if (BYTES_BIG_ENDIAN)
4012 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
4013 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
4014 emit_insn (gen_altivec_vmrghw (operands[0], ve, vo));
4018 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
4019 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
4020 emit_insn (gen_altivec_vmrglw (operands[0], ve, vo));
;; Widening unsigned halfword multiply, low half; opposite merge of _hi.
4025 (define_expand "vec_widen_umult_lo_v8hi"
4026 [(set (match_operand:V4SI 0 "register_operand" "=v")
4027 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
4028 (match_operand:V8HI 2 "register_operand" "v")]
4032 rtx ve = gen_reg_rtx (V4SImode);
4033 rtx vo = gen_reg_rtx (V4SImode);
4035 if (BYTES_BIG_ENDIAN)
4037 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
4038 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
4039 emit_insn (gen_altivec_vmrglw (operands[0], ve, vo));
4043 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
4044 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
4045 emit_insn (gen_altivec_vmrghw (operands[0], ve, vo));
;; Widening signed halfword multiply, high half; roles swap on LE.
4050 (define_expand "vec_widen_smult_hi_v8hi"
4051 [(set (match_operand:V4SI 0 "register_operand" "=v")
4052 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
4053 (match_operand:V8HI 2 "register_operand" "v")]
4057 rtx ve = gen_reg_rtx (V4SImode);
4058 rtx vo = gen_reg_rtx (V4SImode);
4060 if (BYTES_BIG_ENDIAN)
4062 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
4063 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
4064 emit_insn (gen_altivec_vmrghw (operands[0], ve, vo));
4068 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
4069 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
4070 emit_insn (gen_altivec_vmrglw (operands[0], ve, vo));
;; Widening signed halfword multiply, low half; opposite merge of _hi.
4075 (define_expand "vec_widen_smult_lo_v8hi"
4076 [(set (match_operand:V4SI 0 "register_operand" "=v")
4077 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
4078 (match_operand:V8HI 2 "register_operand" "v")]
4082 rtx ve = gen_reg_rtx (V4SImode);
4083 rtx vo = gen_reg_rtx (V4SImode);
4085 if (BYTES_BIG_ENDIAN)
4087 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
4088 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
4089 emit_insn (gen_altivec_vmrglw (operands[0], ve, vo));
4093 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
4094 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
4095 emit_insn (gen_altivec_vmrghw (operands[0], ve, vo));
;; Truncating pack of two wide vectors into one narrow vector (modulo,
;; i.e. no saturation) via UNSPEC_VPACK_UNS_UNS_MOD.
4100 (define_expand "vec_pack_trunc_<mode>"
4101 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
4102 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
4103 (match_operand:VP 2 "register_operand" "v")]
4104 UNSPEC_VPACK_UNS_UNS_MOD))]
;; Byte multiply: form even/odd halfword products, then use a vperm mask
;; that picks the low byte of each halfword product (indices depend on
;; endianness) to produce the 16 byte results.
4108 (define_expand "mulv16qi3"
4109 [(set (match_operand:V16QI 0 "register_operand" "=v")
4110 (mult:V16QI (match_operand:V16QI 1 "register_operand" "v")
4111 (match_operand:V16QI 2 "register_operand" "v")))]
4114 rtx even = gen_reg_rtx (V8HImode);
4115 rtx odd = gen_reg_rtx (V8HImode);
4116 rtx mask = gen_reg_rtx (V16QImode);
4117 rtvec v = rtvec_alloc (16);
4120 for (i = 0; i < 8; ++i) {
4121 RTVEC_ELT (v, 2 * i)
4122 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 1 : 31 - 2 * i);
4123 RTVEC_ELT (v, 2 * i + 1)
4124 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 17 : 15 - 2 * i);
4127 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
4128 emit_insn (gen_altivec_vmulesb (even, operands[1], operands[2]));
4129 emit_insn (gen_altivec_vmulosb (odd, operands[1], operands[2]));
4130 emit_insn (gen_altivec_vperm_v8hiv16qi (operands[0], even, odd, mask));
;; vpermxor expander.  The hardware instruction indexes bytes in BE order,
;; so on LE the selector (operand 3) is one's-complemented first to turn
;; LE indices into the equivalent BE indices.
4134 (define_expand "altivec_vpermxor"
4135 [(use (match_operand:V16QI 0 "register_operand"))
4136 (use (match_operand:V16QI 1 "register_operand"))
4137 (use (match_operand:V16QI 2 "register_operand"))
4138 (use (match_operand:V16QI 3 "register_operand"))]
4141 if (!BYTES_BIG_ENDIAN)
4143 /* vpermxor indexes the bytes using Big Endian numbering. If LE,
4144 change indexing in operand[3] to BE index. */
4145 rtx be_index = gen_reg_rtx (V16QImode);
4147 emit_insn (gen_one_cmplv16qi2 (be_index, operands[3]));
4148 emit_insn (gen_crypto_vpermxor_v16qi (operands[0], operands[1],
4149 operands[2], be_index));
4152 emit_insn (gen_crypto_vpermxor_v16qi (operands[0], operands[1],
4153 operands[2], operands[3]));
;; Float negate: build a vector of sign-bit masks (splat -1, shift each
;; word by its own value) and XOR it into the input to flip sign bits.
4157 (define_expand "altivec_negv4sf2"
4158 [(use (match_operand:V4SF 0 "register_operand"))
4159 (use (match_operand:V4SF 1 "register_operand"))]
4164 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
4165 neg0 = gen_reg_rtx (V4SImode);
4166 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
4167 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
4170 emit_insn (gen_xorv4sf3 (operands[0],
4171 gen_lowpart (V4SFmode, neg0), operands[1]));
4176 ;; Vector reverse elements for V16QI V8HI V4SI V4SF
;; With P9 the reversal is done with byte-reverse instructions (xxbrq for
;; bytes; xxbrq on a V1TI view followed by xxbrh/xxbrw for wider
;; elements).  Without P9 a vperm reverse-index mask is built instead.
4177 (define_expand "altivec_vreve<mode>2"
4178 [(set (match_operand:VEC_K 0 "register_operand" "=v")
4179 (unspec:VEC_K [(match_operand:VEC_K 1 "register_operand" "v")]
4183 if (TARGET_P9_VECTOR)
4185 if (<MODE>mode == V16QImode)
4186 emit_insn (gen_p9_xxbrq_v16qi (operands[0], operands[1]));
4187 else if (<MODE>mode == V8HImode)
4189 rtx subreg1 = simplify_gen_subreg (V1TImode, operands[1],
4191 rtx temp = gen_reg_rtx (V1TImode);
4192 emit_insn (gen_p9_xxbrq_v1ti (temp, subreg1));
4193 rtx subreg2 = simplify_gen_subreg (<MODE>mode, temp,
4195 emit_insn (gen_p9_xxbrh_v8hi (operands[0], subreg2));
4197 else /* V4SI and V4SF. */
4199 rtx subreg1 = simplify_gen_subreg (V1TImode, operands[1],
4201 rtx temp = gen_reg_rtx (V1TImode);
4202 emit_insn (gen_p9_xxbrq_v1ti (temp, subreg1));
4203 rtx subreg2 = simplify_gen_subreg (<MODE>mode, temp,
4205 if (<MODE>mode == V4SImode)
4206 emit_insn (gen_p9_xxbrw_v4si (operands[0], subreg2));
4208 emit_insn (gen_p9_xxbrw_v4sf (operands[0], subreg2));
4213 int i, j, size, num_elements;
4214 rtvec v = rtvec_alloc (16);
4215 rtx mask = gen_reg_rtx (V16QImode);
4217 size = GET_MODE_UNIT_SIZE (<MODE>mode);
4218 num_elements = GET_MODE_NUNITS (<MODE>mode);
4220 for (j = 0; j < num_elements; j++)
4221 for (i = 0; i < size; i++)
4222 RTVEC_ELT (v, i + j * size)
4223 = GEN_INT (i + (num_elements - 1 - j) * size);
4225 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
4226 emit_insn (gen_altivec_vperm_<mode> (operands[0], operands[1],
4227 operands[1], mask));
4231 ;; Vector reverse elements for V2DI V2DF
;; For two-element vectors a doubleword swap (xxswapd) is the reversal.
4232 (define_expand "altivec_vreve<mode>2"
4233 [(set (match_operand:VEC_64 0 "register_operand" "=v")
4234 (unspec:VEC_64 [(match_operand:VEC_64 1 "register_operand" "v")]
4238 emit_insn (gen_xxswapd_<mode> (operands[0], operands[1]));
4242 ;; Vector SIMD PEM v2.06c defines LVLX, LVLXL, LVRX, LVRXL,
4243 ;; STVLX, STVLXL, STVRX, STVRXL are available only on Cell.
;; Cell-only load-vector-left-indexed (lvlx); guarded by the
;; PROCESSOR_CELL condition below.
4244 (define_insn "altivec_lvlx"
4245 [(set (match_operand:V16QI 0 "register_operand" "=v")
4246 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
4248 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4250 [(set_attr "type" "vecload")])
;; Cell-only lvlxl (LRU-hinted variant of lvlx).
4252 (define_insn "altivec_lvlxl"
4253 [(set (match_operand:V16QI 0 "register_operand" "=v")
4254 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
4256 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4258 [(set_attr "type" "vecload")])
;; Cell-only load-vector-right-indexed (lvrx).
4260 (define_insn "altivec_lvrx"
4261 [(set (match_operand:V16QI 0 "register_operand" "=v")
4262 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
4264 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4266 [(set_attr "type" "vecload")])
;; Cell-only lvrxl (LRU-hinted variant of lvrx).
4268 (define_insn "altivec_lvrxl"
4269 [(set (match_operand:V16QI 0 "register_operand" "=v")
4270 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
4272 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4274 [(set_attr "type" "vecload")])
;; Cell-only store-vector-left-indexed (stvlx).
4276 (define_insn "altivec_stvlx"
4278 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
4279 (match_operand:V16QI 1 "register_operand" "v"))
4280 (unspec [(const_int 0)] UNSPEC_STVLX)])]
4281 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4283 [(set_attr "type" "vecstore")])
;; Cell-only stvlxl (LRU-hinted variant of stvlx).
4285 (define_insn "altivec_stvlxl"
4287 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
4288 (match_operand:V16QI 1 "register_operand" "v"))
4289 (unspec [(const_int 0)] UNSPEC_STVLXL)])]
4290 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4292 [(set_attr "type" "vecstore")])
;; Cell-only store-vector-right-indexed (stvrx).
4294 (define_insn "altivec_stvrx"
4296 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
4297 (match_operand:V16QI 1 "register_operand" "v"))
4298 (unspec [(const_int 0)] UNSPEC_STVRX)])]
4299 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4301 [(set_attr "type" "vecstore")])
;; Cell-only stvrxl (LRU-hinted variant of stvrx).
4303 (define_insn "altivec_stvrxl"
4305 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
4306 (match_operand:V16QI 1 "register_operand" "v"))
4307 (unspec [(const_int 0)] UNSPEC_STVRXL)])]
4308 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4310 [(set_attr "type" "vecstore")])
;; Signed high halfwords -> V4SF: sign-extend with vec_unpacks_hi, then
;; convert the words to float with vcfsx.
4312 (define_expand "vec_unpacks_float_hi_v8hi"
4313 [(set (match_operand:V4SF 0 "register_operand")
4314 (unspec:V4SF [(match_operand:V8HI 1 "register_operand")]
4315 UNSPEC_VUPKHS_V4SF))]
4318 rtx tmp = gen_reg_rtx (V4SImode);
4320 emit_insn (gen_vec_unpacks_hi_v8hi (tmp, operands[1]));
4321 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
;; Signed low halfwords -> V4SF: unpack low, then vcfsx.
4325 (define_expand "vec_unpacks_float_lo_v8hi"
4326 [(set (match_operand:V4SF 0 "register_operand")
4327 (unspec:V4SF [(match_operand:V8HI 1 "register_operand")]
4328 UNSPEC_VUPKLS_V4SF))]
4331 rtx tmp = gen_reg_rtx (V4SImode);
4333 emit_insn (gen_vec_unpacks_lo_v8hi (tmp, operands[1]));
4334 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
;; Unsigned high halfwords -> V4SF: zero-extend with vec_unpacku_hi,
;; then convert with vcfux.
4338 (define_expand "vec_unpacku_float_hi_v8hi"
4339 [(set (match_operand:V4SF 0 "register_operand")
4340 (unspec:V4SF [(match_operand:V8HI 1 "register_operand")]
4341 UNSPEC_VUPKHU_V4SF))]
4344 rtx tmp = gen_reg_rtx (V4SImode);
4346 emit_insn (gen_vec_unpacku_hi_v8hi (tmp, operands[1]));
4347 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
;; Unsigned low halfwords -> V4SF: unpack low, then vcfux.
4351 (define_expand "vec_unpacku_float_lo_v8hi"
4352 [(set (match_operand:V4SF 0 "register_operand")
4353 (unspec:V4SF [(match_operand:V8HI 1 "register_operand")]
4354 UNSPEC_VUPKLU_V4SF))]
4357 rtx tmp = gen_reg_rtx (V4SImode);
4359 emit_insn (gen_vec_unpacku_lo_v8hi (tmp, operands[1]));
4360 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
4365 ;; Power8/power9 vector instructions encoded as Altivec instructions
4367 ;; Vector count leading zeros
4368 (define_insn "*p8v_clz<mode>2"
4369 [(set (match_operand:VI2 0 "register_operand" "=v")
4370 (clz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4373 [(set_attr "type" "vecsimple")])
4375 ;; Vector absolute difference unsigned
;; Emits vabsdu{b,h,w} per element width.
4376 (define_insn "uabd<mode>3"
4377 [(set (match_operand:VI 0 "register_operand" "=v")
4380 (match_operand:VI 1 "register_operand" "v")
4381 (match_operand:VI 2 "register_operand" "v"))
4386 "vabsdu<wd> %0,%1,%2"
4387 [(set_attr "type" "vecsimple")])
4389 ;; Vector count trailing zeros
4390 (define_insn "*p9v_ctz<mode>2"
4391 [(set (match_operand:VI2 0 "register_operand" "=v")
4392 (ctz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4395 [(set_attr "type" "vecsimple")])
4397 ;; Vector population count
4398 (define_insn "*p8v_popcount<mode>2"
4399 [(set (match_operand:VI2 0 "register_operand" "=v")
4400 (popcount:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4403 [(set_attr "type" "vecsimple")])
;; Vector parity of bytes (vprtyb family) over the VEC_IP modes.
4406 (define_insn "rs6000_vprtyb<mode>2"
4407 [(set (match_operand:VEC_IP 0 "register_operand" "=v")
4409 [(match_operand:VEC_IP 1 "register_operand" "v")]
4413 [(set_attr "type" "vecsimple")])
4415 ;; Vector Gather Bits by Bytes by Doubleword
4416 (define_insn "p8v_vgbbd"
4417 [(set (match_operand:V16QI 0 "register_operand" "=v")
4418 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
4422 [(set_attr "type" "vecsimple")])
4425 ;; 128-bit binary integer arithmetic
4426 ;; We have a special container type (V1TImode) to allow operations using the
4427 ;; ISA 2.07 128-bit binary support to target the VMX/altivec registers without
4428 ;; having to worry about the register allocator deciding GPRs are better.
;; 128-bit add, modulo 2^128 (vadduqm).
4430 (define_insn "altivec_vadduqm"
4431 [(set (match_operand:V1TI 0 "register_operand" "=v")
4432 (plus:V1TI (match_operand:V1TI 1 "register_operand" "v")
4433 (match_operand:V1TI 2 "register_operand" "v")))]
4436 [(set_attr "type" "vecsimple")])
;; Carry out of a 128-bit add (vaddcuq).
4438 (define_insn "altivec_vaddcuq"
4439 [(set (match_operand:V1TI 0 "register_operand" "=v")
4440 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4441 (match_operand:V1TI 2 "register_operand" "v")]
4445 [(set_attr "type" "vecsimple")])
;; 128-bit subtract, modulo 2^128 (vsubuqm).
4447 (define_insn "altivec_vsubuqm"
4448 [(set (match_operand:V1TI 0 "register_operand" "=v")
4449 (minus:V1TI (match_operand:V1TI 1 "register_operand" "v")
4450 (match_operand:V1TI 2 "register_operand" "v")))]
4453 [(set_attr "type" "vecsimple")])
4455 (define_insn "altivec_vsubcuq"
4456 [(set (match_operand:V1TI 0 "register_operand" "=v")
4457 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4458 (match_operand:V1TI 2 "register_operand" "v")]
4462 [(set_attr "type" "vecsimple")])
4464 (define_insn "altivec_vaddeuqm"
4465 [(set (match_operand:V1TI 0 "register_operand" "=v")
4466 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4467 (match_operand:V1TI 2 "register_operand" "v")
4468 (match_operand:V1TI 3 "register_operand" "v")]
4471 "vaddeuqm %0,%1,%2,%3"
4472 [(set_attr "type" "vecsimple")])
4474 (define_insn "altivec_vaddecuq"
4475 [(set (match_operand:V1TI 0 "register_operand" "=v")
4476 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4477 (match_operand:V1TI 2 "register_operand" "v")
4478 (match_operand:V1TI 3 "register_operand" "v")]
4481 "vaddecuq %0,%1,%2,%3"
4482 [(set_attr "type" "vecsimple")])
4484 (define_insn "altivec_vsubeuqm"
4485 [(set (match_operand:V1TI 0 "register_operand" "=v")
4486 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4487 (match_operand:V1TI 2 "register_operand" "v")
4488 (match_operand:V1TI 3 "register_operand" "v")]
4491 "vsubeuqm %0,%1,%2,%3"
4492 [(set_attr "type" "vecsimple")])
4494 (define_insn "altivec_vsubecuq"
4495 [(set (match_operand:V1TI 0 "register_operand" "=v")
4496 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4497 (match_operand:V1TI 2 "register_operand" "v")
4498 (match_operand:V1TI 3 "register_operand" "v")]
4501 "vsubecuq %0,%1,%2,%3"
4502 [(set_attr "type" "vecsimple")])
4504 ;; We use V2DI as the output type to simplify converting the permute
4505 ;; bits into an integer
;; vbpermq: bit-permute; V16QI inputs, V2DI result (see comment above).
;; NOTE(review): unspec codes, conditions and asm templates are elided from
;; this chunk for the three patterns below.
4506 (define_insn "altivec_vbpermq"
4507 [(set (match_operand:V2DI 0 "register_operand" "=v")
4508 (unspec:V2DI [(match_operand:V16QI 1 "register_operand" "v")
4509 (match_operand:V16QI 2 "register_operand" "v")]
4513 [(set_attr "type" "vecperm")])
4515 ; One of the vector API interfaces requires returning vector unsigned char.
;; vbpermq2: identical inputs to vbpermq but the result stays V16QI, to match
;; the builtin interface described in the comment above.
4516 (define_insn "altivec_vbpermq2"
4517 [(set (match_operand:V16QI 0 "register_operand" "=v")
4518 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
4519 (match_operand:V16QI 2 "register_operand" "v")]
4523 [(set_attr "type" "vecperm")])
;; vbpermd: doubleword variant; first input is V2DI rather than V16QI.
;; Note its type attr is "vecsimple", unlike the "vecperm" pair above.
4525 (define_insn "altivec_vbpermd"
4526 [(set (match_operand:V2DI 0 "register_operand" "=v")
4527 (unspec:V2DI [(match_operand:V2DI 1 "register_operand" "v")
4528 (match_operand:V16QI 2 "register_operand" "v")]
4532 [(set_attr "type" "vecsimple")])
4534 ;; Support for SAD (sum of absolute differences).
4536 ;; Due to saturating semantics, we can't combine the sum-across
4537 ;; with the vector accumulate in vsum4ubs. A vadduwm is needed.
;; usadv16qi: expands to uabd (absolute difference) -> vsum4ubs against a
;; splatted zero -> non-saturating vadduwm add of the accumulator operand 3.
;; NOTE(review): the expander's condition, opening `{', and trailing
;; `DONE;'/closing lines are elided from this chunk (gap after 4552).
4538 (define_expand "usadv16qi"
4539 [(use (match_operand:V4SI 0 "register_operand"))
4540 (use (match_operand:V16QI 1 "register_operand"))
4541 (use (match_operand:V16QI 2 "register_operand"))
4542 (use (match_operand:V4SI 3 "register_operand"))]
4545 rtx absd = gen_reg_rtx (V16QImode);
4546 rtx zero = gen_reg_rtx (V4SImode);
4547 rtx psum = gen_reg_rtx (V4SImode);
4549 emit_insn (gen_uabdv16qi3 (absd, operands[1], operands[2]));
4550 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
4551 emit_insn (gen_altivec_vsum4ubs (psum, absd, zero));
4552 emit_insn (gen_addv4si3 (operands[0], psum, operands[3]));
4556 ;; Since vsum4shs is saturating and further performs signed
4557 ;; arithmetic, we can't combine the sum-across with the vector
4558 ;; accumulate in vsum4shs. A vadduwm is needed.
;; usadv8hi: halfword SAD; same sequence as usadv16qi but via uabdv8hi3 and
;; the signed-saturating vsum4shs (see the caveat in the comment above).
;; NOTE(review): condition / `{' / `DONE;' lines are elided from this chunk.
4559 (define_expand "usadv8hi"
4560 [(use (match_operand:V4SI 0 "register_operand"))
4561 (use (match_operand:V8HI 1 "register_operand"))
4562 (use (match_operand:V8HI 2 "register_operand"))
4563 (use (match_operand:V4SI 3 "register_operand"))]
4566 rtx absd = gen_reg_rtx (V8HImode);
4567 rtx zero = gen_reg_rtx (V4SImode);
4568 rtx psum = gen_reg_rtx (V4SImode);
4570 emit_insn (gen_uabdv8hi3 (absd, operands[1], operands[2]));
4571 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
4572 emit_insn (gen_altivec_vsum4shs (psum, absd, zero));
4573 emit_insn (gen_addv4si3 (operands[0], psum, operands[3]));
4577 ;; Decimal Integer operations
;; Iterators shared by the BCD (binary-coded-decimal) patterns below:
;; - UNSPEC_BCD_ADD_SUB selects between the bcdadd and bcdsub unspecs,
;; - bcd_add_sub maps each unspec to its "add"/"sub" name fragment,
;; - BCD_TEST enumerates the CR6 comparisons the expanders accept,
;; - VBCD lists the modes BCD values may live in (V1TI or V16QI).
4578 (define_int_iterator UNSPEC_BCD_ADD_SUB [UNSPEC_BCDADD UNSPEC_BCDSUB])
4580 (define_int_attr bcd_add_sub [(UNSPEC_BCDADD "add")
4581 (UNSPEC_BCDSUB "sub")])
4583 (define_code_iterator BCD_TEST [eq lt le gt ge unordered])
4584 (define_mode_iterator VBCD [V1TI V16QI])
;; bcdadd./bcdsub.: operand 3 is a 0/1 immediate (per const_0_to_1_operand);
;; the dot in the template means the insn also sets CR6, modelled here as a
;; CCFP clobber.  NOTE(review): insn condition line elided from this chunk.
4586 (define_insn "bcd<bcd_add_sub>_<mode>"
4587 [(set (match_operand:VBCD 0 "register_operand" "=v")
4588 (unspec:VBCD [(match_operand:VBCD 1 "register_operand" "v")
4589 (match_operand:VBCD 2 "register_operand" "v")
4590 (match_operand:QI 3 "const_0_to_1_operand" "n")]
4591 UNSPEC_BCD_ADD_SUB))
4592 (clobber (reg:CCFP CR6_REGNO))]
4594 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4595 [(set_attr "type" "vecsimple")])
4597 ;; Use a floating point type (V2DFmode) for the compare to set CR6 so that we
4598 ;; can use the unordered test for BCD nans and add/subtracts that overflow. An
4599 ;; UNORDERED test on an integer type (like V1TImode) is not defined. The type
4600 ;; probably should be one that can go in the VMX (Altivec) registers, so we
4601 ;; can't use DDmode or DFmode.
;; Test-only form: the arithmetic result is discarded (scratch operand 0);
;; only the CR6 setting is kept.  Operand 4 must be the zero constant
;; ("j" constraint), giving the CCFP compare-against-zero shape.
;; NOTE(review): the compare wrapper and condition lines between 4603 and
;; 4609 are elided from this chunk.
4602 (define_insn "*bcd<bcd_add_sub>_test_<mode>"
4603 [(set (reg:CCFP CR6_REGNO)
4605 (unspec:V2DF [(match_operand:VBCD 1 "register_operand" "v")
4606 (match_operand:VBCD 2 "register_operand" "v")
4607 (match_operand:QI 3 "const_0_to_1_operand" "i")]
4609 (match_operand:V2DF 4 "zero_constant" "j")))
4610 (clobber (match_scratch:VBCD 0 "=v"))]
4612 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4613 [(set_attr "type" "vecsimple")])
;; Combined form: keeps both the BCD result (operand 0) and the CR6 setting
;; from the same single bcdadd./bcdsub. instruction.
4615 (define_insn "*bcd<bcd_add_sub>_test2_<mode>"
4616 [(set (match_operand:VBCD 0 "register_operand" "=v")
4617 (unspec:VBCD [(match_operand:VBCD 1 "register_operand" "v")
4618 (match_operand:VBCD 2 "register_operand" "v")
4619 (match_operand:QI 3 "const_0_to_1_operand" "i")]
4620 UNSPEC_BCD_ADD_SUB))
4621 (set (reg:CCFP CR6_REGNO)
4623 (unspec:V2DF [(match_dup 1)
4627 (match_operand:V2DF 4 "zero_constant" "j")))]
4629 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4630 [(set_attr "type" "vecsimple")])
;; Doubleword bit-manipulation insns.  All five share the same shape: two
;; V2DI inputs in Altivec registers, one V2DI unspec result.  The mnemonics
;; suggest centrifuge (vcfuged), count-leading/trailing-zeros-under-mask
;; (vclzdm/vctzdm) and parallel-bits deposit/extract (vpdepd/vpextd) --
;; TODO confirm against the ISA; the unspec codes, conditions and asm
;; templates are elided from this chunk.
4632 (define_insn "vcfuged"
4633 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4634 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4635 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4639 [(set_attr "type" "vecsimple")])
4641 (define_insn "vclzdm"
4642 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4643 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4644 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4648 [(set_attr "type" "vecsimple")])
4650 (define_insn "vctzdm"
4651 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4652 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4653 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4657 [(set_attr "type" "vecsimple")])
4659 (define_insn "vpdepd"
4660 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4661 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4662 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4666 [(set_attr "type" "vecsimple")])
4668 (define_insn "vpextd"
4669 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4670 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4671 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4675 [(set_attr "type" "vecsimple")])
;; NOTE(review): the `(define_insn "...")' header line (original line 4677)
;; is missing from this chunk, so this fragment is headerless here.  Shape:
;; DI result in a GPR ("=r") from a V2DI Altivec-register input and a 3-bit
;; immediate (u3bit_cint_operand) -- consistent with vgnb, but confirm the
;; name against the full file.
4678 [(set (match_operand:DI 0 "register_operand" "=r")
4679 (unspec:DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4680 (match_operand:QI 2 "u3bit_cint_operand" "n")]
4684 [(set_attr "type" "vecsimple")])
;; vclrlb / vclrrb: vector clear left/right bytes, with a GPR byte count.
;; Each pattern's C output routine swaps to the opposite mnemonic on
;; little-endian (the !BYTES_BIG_ENDIAN branch), so the named pattern always
;; implements its big-endian semantics regardless of target endianness.
;; NOTE(review): unspec codes, conditions and some closing-brace lines are
;; elided from this chunk.
4686 (define_insn "vclrlb"
4687 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
4688 (unspec:V16QI [(match_operand:V16QI 1 "altivec_register_operand" "v")
4689 (match_operand:SI 2 "gpc_reg_operand" "r")]
4693 if (BYTES_BIG_ENDIAN)
4694 return "vclrlb %0,%1,%2";
4696 return "vclrrb %0,%1,%2";
4698 [(set_attr "type" "vecsimple")])
4700 (define_insn "vclrrb"
4701 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
4702 (unspec:V16QI [(match_operand:V16QI 1 "altivec_register_operand" "v")
4703 (match_operand:SI 2 "gpc_reg_operand" "r")]
4707 if (BYTES_BIG_ENDIAN)
4708 return "vclrrb %0,%1,%2";
4710 return "vclrlb %0,%1,%2";
4712 [(set_attr "type" "vecsimple")])
;; Expander producing an SI-valued BCD comparison: emits the CR6-setting
;; bcdadd./bcdsub. (result discarded into scratch operand 5) followed by a
;; BCD_TEST comparison of CR6 into operand 0.  The preparation code supplies
;; the V2DF zero for operand 4.
;; NOTE(review): several interior lines (compare wrappers, conditions, the
;; closing of each expander) are elided from this chunk.
4714 (define_expand "bcd<bcd_add_sub>_<code>_<mode>"
4715 [(parallel [(set (reg:CCFP CR6_REGNO)
4717 (unspec:V2DF [(match_operand:VBCD 1 "register_operand")
4718 (match_operand:VBCD 2 "register_operand")
4719 (match_operand:QI 3 "const_0_to_1_operand")]
4722 (clobber (match_scratch:VBCD 5))])
4723 (set (match_operand:SI 0 "register_operand")
4724 (BCD_TEST:SI (reg:CCFP CR6_REGNO)
4728 operands[4] = CONST0_RTX (V2DFmode);
;; Validity check via `bcdsub. %0,%1,%1,0' (subtracting a value from
;; itself): only CR6 is kept, the numeric result goes to a scratch.
4731 (define_insn "*bcdinvalid_<mode>"
4732 [(set (reg:CCFP CR6_REGNO)
4734 (unspec:V2DF [(match_operand:VBCD 1 "register_operand" "v")]
4736 (match_operand:V2DF 2 "zero_constant" "j")))
4737 (clobber (match_scratch:VBCD 0 "=v"))]
4739 "bcdsub. %0,%1,%1,0"
4740 [(set_attr "type" "vecsimple")])
;; Named expander: pairs the insn above with an UNORDERED test of CR6 to
;; yield an SI flag in operand 0; operand 2 gets the V2DF zero.
4742 (define_expand "bcdinvalid_<mode>"
4743 [(parallel [(set (reg:CCFP CR6_REGNO)
4745 (unspec:V2DF [(match_operand:VBCD 1 "register_operand")]
4748 (clobber (match_scratch:VBCD 3))])
4749 (set (match_operand:SI 0 "register_operand")
4750 (unordered:SI (reg:CCFP CR6_REGNO)
4754 operands[2] = CONST0_RTX (V2DFmode);
;; bcdshift: three-operand BCD insn with a 0/1 immediate and a CR6 clobber,
;; same CC-clobber shape as bcdadd/bcdsub above.  NOTE(review): its unspec
;; code, condition and asm template lines are elided from this chunk.
4757 (define_insn "bcdshift_v16qi"
4758 [(set (match_operand:V16QI 0 "register_operand" "=v")
4759 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
4760 (match_operand:V16QI 2 "register_operand" "v")
4761 (match_operand:QI 3 "const_0_to_1_operand" "n")]
4763 (clobber (reg:CCFP CR6_REGNO))]
4766 [(set_attr "type" "vecsimple")])
;; bcdmul10: expands to bcdshift with a shift-amount vector splatted from
;; +1 (vspltisb of const1_rtx) -- i.e. shift the BCD digits one place left.
4768 (define_expand "bcdmul10_v16qi"
4769 [(set (match_operand:V16QI 0 "register_operand")
4770 (unspec:V16QI [(match_operand:V16QI 1 "register_operand")]
4772 (clobber (reg:CCFP CR6_REGNO))]
4775 rtx one = gen_reg_rtx (V16QImode);
4777 emit_insn (gen_altivec_vspltisb (one, const1_rtx));
4778 emit_insn (gen_bcdshift_v16qi (operands[0], one, operands[1], const0_rtx));
;; bcddiv10: identical except the splat is of -1 (constm1_rtx), shifting the
;; digits one place right.  The local `one' name is a slight misnomer here.
4783 (define_expand "bcddiv10_v16qi"
4784 [(set (match_operand:V16QI 0 "register_operand")
4785 (unspec:V16QI [(match_operand:V16QI 1 "register_operand")]
4787 (clobber (reg:CCFP CR6_REGNO))]
4790 rtx one = gen_reg_rtx (V16QImode);
4792 emit_insn (gen_altivec_vspltisb (one, constm1_rtx));
4793 emit_insn (gen_bcdshift_v16qi (operands[0], one, operands[1], const0_rtx));
4799 ;; Peephole2 pattern to combine a bcdadd/bcdsub that calculates the value and
4800 ;; the bcdadd/bcdsub that tests the value. The combiner won't work since
4801 ;; CR6 is a hard coded register. Unfortunately, all of the Altivec predicate
4802 ;; support is hard coded to use the fixed register CR6 instead of creating
4803 ;; a register class for CR6.
4806 [(parallel [(set (match_operand:V1TI 0 "register_operand")
4807 (unspec:V1TI [(match_operand:V1TI 1 "register_operand")
4808 (match_operand:V1TI 2 "register_operand")
4809 (match_operand:QI 3 "const_0_to_1_operand")]
4810 UNSPEC_BCD_ADD_SUB))
4811 (clobber (reg:CCFP CR6_REGNO))])
4812 (parallel [(set (reg:CCFP CR6_REGNO)
4814 (unspec:V2DF [(match_dup 1)
4818 (match_operand:V2DF 4 "zero_constant")))
4819 (clobber (match_operand:V1TI 5 "register_operand"))])]
4821 [(parallel [(set (match_dup 0)
4822 (unspec:V1TI [(match_dup 1)
4825 UNSPEC_BCD_ADD_SUB))
4826 (set (reg:CCFP CR6_REGNO)
4828 (unspec:V2DF [(match_dup 1)