2 ;; Copyright (C) 2002-2024 Free Software Foundation, Inc.
3 ;; Contributed by Aldy Hernandez (aldy@quesejoda.com)
5 ;; This file is part of GCC.
7 ;; GCC is free software; you can redistribute it and/or modify it
8 ;; under the terms of the GNU General Public License as published
9 ;; by the Free Software Foundation; either version 3, or (at your
10 ;; option) any later version.
12 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
13 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 ;; License for more details.
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
;; Enumeration of UNSPEC codes used by the AltiVec insn patterns in this
;; file.  NOTE(review): this listing is elided — the embedded line numbers
;; jump (21 -> 51, 55 -> 77, ...), so only a subset of the enum members is
;; visible; verify against the full file before editing.
21 (define_c_enum "unspec"
51 UNSPEC_VPACK_SIGN_SIGN_SAT
52 UNSPEC_VPACK_SIGN_UNS_SAT
53 UNSPEC_VPACK_UNS_UNS_SAT
54 UNSPEC_VPACK_UNS_UNS_MOD
55 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT
77 UNSPEC_VUNPACK_HI_SIGN
78 UNSPEC_VUNPACK_LO_SIGN
79 UNSPEC_VUNPACK_HI_SIGN_DIRECT
80 UNSPEC_VUNPACK_LO_SIGN_DIRECT
83 UNSPEC_CONVERT_4F32_8I16
84 UNSPEC_CONVERT_4F32_8F16
146 UNSPEC_VSUMSWS_DIRECT
;; Volatile unspec codes (UNSPECV_*); members elided in this listing.
175 (define_c_enum "unspecv"
;; Mode iterators and mode attributes shared by the patterns below.
;; NOTE(review): the bodies of VM, VM2 and VParity are elided in this
;; listing (lines 192-198, 204-209, 220-223 missing) — confirm the full
;; element lists against the complete file.
183 ;; Short vec int modes
184 (define_mode_iterator VIshort [V8HI V16QI])
186 (define_mode_iterator VF [V4SF])
187 ;; Vec modes, pity mode iterators are not composable
188 (define_mode_iterator V [V4SI V8HI V16QI V4SF])
189 ;; Vec modes for move/logical/permute ops, include vector types for move not
190 ;; otherwise handled by altivec (v2df, v2di, ti)
191 (define_mode_iterator VM [V4SI
199 (KF "FLOAT128_VECTOR_P (KFmode)")
200 (TF "FLOAT128_VECTOR_P (TFmode)")])
202 ;; Like VM, except don't do TImode
203 (define_mode_iterator VM2 [V4SI
210 (KF "FLOAT128_VECTOR_P (KFmode)")
211 (TF "FLOAT128_VECTOR_P (TFmode)")])
213 ;; Map the Vector convert single precision to double precision for integer
214 ;; versus floating point
215 (define_mode_attr VS_sxwsp [(V4SI "sxw") (V4SF "sp")])
217 ;; Specific iterator for parity which does not have a byte/half-word form, but
218 ;; does have a quad word form
219 (define_mode_iterator VParity [V4SI
;; Per-mode mnemonic suffix ("d"/"w"/"h"/"b"), scalar element mode, and
;; the C condition selecting the vector unit that implements each mode.
224 (define_mode_attr VI_char [(V2DI "d") (V4SI "w") (V8HI "h") (V16QI "b")])
225 (define_mode_attr VI_scalar [(V2DI "DI") (V4SI "SI") (V8HI "HI") (V16QI "QI")])
226 (define_mode_attr VI_unit [(V16QI "VECTOR_UNIT_ALTIVEC_P (V16QImode)")
227 (V8HI "VECTOR_UNIT_ALTIVEC_P (V8HImode)")
228 (V4SI "VECTOR_UNIT_ALTIVEC_P (V4SImode)")
229 (V2DI "VECTOR_UNIT_P8_VECTOR_P (V2DImode)")
230 (V1TI "VECTOR_UNIT_ALTIVEC_P (V1TImode)")])
232 ;; Vector pack/unpack
233 (define_mode_iterator VP [V2DI V4SI V8HI])
234 (define_mode_attr VP_small [(V2DI "V4SI") (V4SI "V8HI") (V8HI "V16QI")])
235 (define_mode_attr VP_small_lc [(V2DI "v4si") (V4SI "v8hi") (V8HI "v16qi")])
236 (define_mode_attr VU_char [(V2DI "w") (V4SI "h") (V8HI "b")])
239 (define_mode_iterator VNEG [V4SI V2DI])
241 ;; Vector move instructions.
;; Register/memory/constant moves for the VM2 modes.  NOTE(review): the
;; asm-template alternatives (original lines 248-255, 270-277) are elided
;; in this listing; only the constant-move fallthrough is visible.
242 (define_insn "*altivec_mov<mode>"
243 [(set (match_operand:VM2 0 "nonimmediate_operand" "=Z,v,v,?Y,?*r,?*r,v,v,?*r")
244 (match_operand:VM2 1 "input_operand" "v,Z,v,*r,Y,*r,j,W,W"))]
245 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)
246 && (register_operand (operands[0], <MODE>mode)
247 || register_operand (operands[1], <MODE>mode))"
256 * return output_vec_const_move (operands);
258 [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*,*")
259 (set_attr "length" "*,*,*,20,20,20,*,8,32")])
261 ;; Unlike other altivec moves, allow the GPRs, since a normal use of TImode
262 ;; is for unions. However for plain data movement, slightly favor the vector
;; TImode variant of the move pattern above; allows GPR alternatives.
264 (define_insn "*altivec_movti"
265 [(set (match_operand:TI 0 "nonimmediate_operand" "=Z,v,v,?Y,?r,?r,v,v")
266 (match_operand:TI 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
267 "VECTOR_MEM_ALTIVEC_P (TImode)
268 && (register_operand (operands[0], TImode)
269 || register_operand (operands[1], TImode))"
278 * return output_vec_const_move (operands);"
279 [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*")])
281 ;; Load up a vector with the most significant bit set by loading up -1 and
282 ;; doing a shift left
;; NOTE(review): three define_splits follow, but each split's opening
;; "(define_split" line (original lines 283, 321, 350) is elided from this
;; listing, along with parts of their preparation code — verify against
;; the full file.
;; Split 1: easy_vector_constant_msb — materialize an all-ones vector,
;; then shift each element left by its own (all-ones) value to leave only
;; the MSB set.
284 [(set (match_operand:VM 0 "altivec_register_operand")
285 (match_operand:VM 1 "easy_vector_constant_msb"))]
286 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
289 rtx dest = operands[0];
294 switch (easy_altivec_constant (operands[1], <MODE>mode))
308 if (mode != <MODE>mode)
309 dest = gen_lowpart (mode, dest);
;; Build a PARALLEL of constm1_rtx elements and initialize dest with it.
311 num_elements = GET_MODE_NUNITS (mode);
312 v = rtvec_alloc (num_elements);
313 for (i = 0; i < num_elements; i++)
314 RTVEC_ELT (v, i) = constm1_rtx;
316 rs6000_expand_vector_init (dest, gen_rtx_PARALLEL (mode, v));
317 emit_insn (gen_rtx_SET (dest, gen_rtx_ASHIFT (mode, dest, dest)));
;; Split 2: easy_vector_constant_add_self — load half the constant via a
;; splat, then double it with a vector add (operand 3 = halved constant,
;; operand 4 = dest + dest).
322 [(set (match_operand:VM 0 "altivec_register_operand")
323 (match_operand:VM 1 "easy_vector_constant_add_self"))]
324 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
325 [(set (match_dup 0) (match_dup 3))
326 (set (match_dup 0) (match_dup 4))]
328 rtx dup = gen_easy_altivec_constant (operands[1]);
330 machine_mode op_mode = <MODE>mode;
332 /* Divide the operand of the resulting VEC_DUPLICATE, and use
333 simplify_rtx to make a CONST_VECTOR. */
334 XEXP (dup, 0) = simplify_const_binary_operation (ASHIFTRT, QImode,
335 XEXP (dup, 0), const1_rtx);
336 const_vec = simplify_rtx (dup);
338 if (op_mode == V4SFmode)
341 operands[0] = gen_lowpart (op_mode, operands[0]);
343 if (GET_MODE (const_vec) == op_mode)
344 operands[3] = const_vec;
346 operands[3] = gen_lowpart (op_mode, const_vec);
347 operands[4] = gen_rtx_PLUS (op_mode, operands[0], operands[0]);
;; Split 3: easy_vector_constant_vsldoi — splat the leading element, set
;; up a second register of all-ones or all-zeros depending on the shift
;; sign, then combine with a vsldoi (operand 6 = byte shift count).
351 [(set (match_operand:VM 0 "altivec_register_operand")
352 (match_operand:VM 1 "easy_vector_constant_vsldoi"))]
353 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && can_create_pseudo_p ()"
354 [(set (match_dup 2) (match_dup 3))
355 (set (match_dup 4) (match_dup 5))
357 (unspec:VM [(match_dup 2)
362 rtx op1 = operands[1];
363 int elt = (BYTES_BIG_ENDIAN) ? 0 : GET_MODE_NUNITS (<MODE>mode) - 1;
364 HOST_WIDE_INT val = const_vector_elt_as_int (op1, elt);
365 rtx rtx_val = GEN_INT (val);
366 int shift = vspltis_shifted (op1);
;; A zero shift would mean the constant is a plain splat, which this
;; split does not handle.
368 gcc_assert (shift != 0);
369 operands[2] = gen_reg_rtx (<MODE>mode);
370 operands[3] = gen_const_vec_duplicate (<MODE>mode, rtx_val);
371 operands[4] = gen_reg_rtx (<MODE>mode);
375 operands[5] = CONSTM1_RTX (<MODE>mode);
376 operands[6] = GEN_INT (-shift);
380 operands[5] = CONST0_RTX (<MODE>mode);
381 operands[6] = GEN_INT (shift);
;; Collapse a vsldoi of a constant whose bytes are all identical into a
;; plain constant move (shifting such a vector is a no-op).  If the
;; constant is not "easy", load it into a fresh register first.
;; NOTE(review): lines 388, 390, 392-393, 395, 397, 400-403 of the
;; pattern are elided in this listing.
385 (define_insn_and_split "sldoi_to_mov<mode>"
386 [(set (match_operand:VM 0 "altivec_register_operand")
387 (unspec:VM [(match_operand:VM 1 "const_vector_each_byte_same")
389 (match_operand:QI 2 "u5bit_cint_operand")]
391 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode) && can_create_pseudo_p ()"
394 [(set (match_dup 0) (match_dup 1))]
396 if (!easy_vector_constant (operands[1], <MODE>mode))
398 rtx dest = gen_reg_rtx (<MODE>mode);
399 emit_move_insn (dest, operands[1]);
;; Read the VRSAVE special-purpose register (SPR 256) into a GPR.
;; NOTE(review): the insn conditions and surrounding C (original lines
;; 407-409, 411, 413) are elided; the choice between the raw mfspr form
;; and the mfvrsave mnemonic presumably depends on assembler support —
;; confirm in the full file.
404 (define_insn "get_vrsave_internal"
405 [(set (match_operand:SI 0 "register_operand" "=r")
406 (unspec:SI [(reg:SI VRSAVE_REGNO)] UNSPEC_GET_VRSAVE))]
410 return "mfspr %0,256";
412 return "mfvrsave %0";
414 [(set_attr "type" "*")])
;; Write a GPR into VRSAVE (SPR 256); volatile since VRSAVE tracks which
;; vector registers are live across calls.
416 (define_insn "*set_vrsave_internal"
417 [(match_parallel 0 "vrsave_operation"
418 [(set (reg:SI VRSAVE_REGNO)
419 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r")
420 (reg:SI VRSAVE_REGNO)] UNSPECV_SET_VRSAVE))])]
424 return "mtspr 256,%1";
426 return "mtvrsave %1";
428 [(set_attr "type" "*")])
;; Darwin-only (TARGET_MACHO, 32-bit) calls to the out-of-line
;; save_world/rest_world runtime routines.  NOTE(review): the asm
;; templates (original lines 435, 444-445) are elided in this listing.
430 (define_insn "*save_world"
431 [(match_parallel 0 "save_world_operation"
432 [(clobber (reg:SI LR_REGNO))
433 (use (match_operand:SI 1 "call_operand" "s"))])]
434 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
436 [(set_attr "type" "branch")])
438 (define_insn "*restore_world"
439 [(match_parallel 0 "restore_world_operation"
441 (use (match_operand:SI 1 "call_operand" "s"))
442 (clobber (match_operand:SI 2 "gpc_reg_operand" "=r"))])]
443 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
446 ;; The save_vregs and restore_vregs patterns don't use memory_operand
447 ;; because (plus (reg) (const_int)) is not a valid vector address.
448 ;; This way is more compact than describing exactly what happens in
449 ;; the out-of-line functions, ie. loading the constant into r11/r12
450 ;; then using indexed addressing, and requires less editing of rtl
451 ;; to describe the operation to dwarf2out_frame_debug_expr.
;; NOTE(review): each pattern's condition and asm template, plus the
;; r11/r12 use/clobber lines (e.g. original 456-457, 461-462), are elided
;; in this listing.
452 (define_insn "*save_vregs_<mode>_r11"
453 [(match_parallel 0 "any_parallel_operand"
454 [(clobber (reg:P LR_REGNO))
455 (use (match_operand:P 1 "symbol_ref_operand" "s"))
458 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
459 (match_operand:P 3 "short_cint_operand" "I")))
460 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
463 [(set_attr "type" "branch")])
465 (define_insn "*save_vregs_<mode>_r12"
466 [(match_parallel 0 "any_parallel_operand"
467 [(clobber (reg:P LR_REGNO))
468 (use (match_operand:P 1 "symbol_ref_operand" "s"))
471 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
472 (match_operand:P 3 "short_cint_operand" "I")))
473 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
476 [(set_attr "type" "branch")])
478 (define_insn "*restore_vregs_<mode>_r11"
479 [(match_parallel 0 "any_parallel_operand"
480 [(clobber (reg:P LR_REGNO))
481 (use (match_operand:P 1 "symbol_ref_operand" "s"))
484 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
485 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
486 (match_operand:P 4 "short_cint_operand" "I"))))])]
489 [(set_attr "type" "branch")])
491 (define_insn "*restore_vregs_<mode>_r12"
492 [(match_parallel 0 "any_parallel_operand"
493 [(clobber (reg:P LR_REGNO))
494 (use (match_operand:P 1 "symbol_ref_operand" "s"))
497 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
498 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
499 (match_operand:P 4 "short_cint_operand" "I"))))])]
502 [(set_attr "type" "branch")])
504 ;; Simple binary operations.
;; Modular integer add (vaddubm/vadduhm/vadduwm/vaddudm).
;; NOTE(review): the condition line (original 511) is elided here.
507 (define_insn "add<mode>3"
508 [(set (match_operand:VI2 0 "register_operand" "=v")
509 (plus:VI2 (match_operand:VI2 1 "register_operand" "v")
510 (match_operand:VI2 2 "register_operand" "v")))]
512 "vaddu<VI_char>m %0,%1,%2"
513 [(set_attr "type" "vecsimple")])
;; Single-precision FP add; asm template (original 520) elided.
515 (define_insn "*altivec_addv4sf3"
516 [(set (match_operand:V4SF 0 "register_operand" "=v")
517 (plus:V4SF (match_operand:V4SF 1 "register_operand" "v")
518 (match_operand:V4SF 2 "register_operand" "v")))]
519 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
521 [(set_attr "type" "vecfloat")])
;; Carry-out of unsigned word add (unspec; lines 527/529 elided).
523 (define_insn "altivec_vaddcuw"
524 [(set (match_operand:V4SI 0 "register_operand" "=v")
525 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
526 (match_operand:V4SI 2 "register_operand" "v")]
528 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
530 [(set_attr "type" "vecsimple")])
;; Unsigned saturating add; also sets VSCR's saturation bit.
532 (define_insn "altivec_vaddu<VI_char>s"
533 [(set (match_operand:VI 0 "register_operand" "=v")
534 (us_plus:VI (match_operand:VI 1 "register_operand" "v")
535 (match_operand:VI 2 "register_operand" "v")))
536 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
538 "vaddu<VI_char>s %0,%1,%2"
539 [(set_attr "type" "vecsimple")])
;; Signed saturating add; also sets VSCR's saturation bit.
541 (define_insn "altivec_vadds<VI_char>s"
542 [(set (match_operand:VI 0 "register_operand" "=v")
543 (ss_plus:VI (match_operand:VI 1 "register_operand" "v")
544 (match_operand:VI 2 "register_operand" "v")))
545 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
546 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
547 "vadds<VI_char>s %0,%1,%2"
548 [(set_attr "type" "vecsimple")])
;; Vector subtract patterns, mirroring the add group above.
;; Modular integer subtract; condition line (original 555) elided.
551 (define_insn "sub<mode>3"
552 [(set (match_operand:VI2 0 "register_operand" "=v")
553 (minus:VI2 (match_operand:VI2 1 "register_operand" "v")
554 (match_operand:VI2 2 "register_operand" "v")))]
556 "vsubu<VI_char>m %0,%1,%2"
557 [(set_attr "type" "vecsimple")])
;; Single-precision FP subtract; asm template (original 564) elided.
559 (define_insn "*altivec_subv4sf3"
560 [(set (match_operand:V4SF 0 "register_operand" "=v")
561 (minus:V4SF (match_operand:V4SF 1 "register_operand" "v")
562 (match_operand:V4SF 2 "register_operand" "v")))]
563 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
565 [(set_attr "type" "vecfloat")])
;; Borrow/carry of unsigned word subtract (unspec; lines 571/573 elided).
567 (define_insn "altivec_vsubcuw"
568 [(set (match_operand:V4SI 0 "register_operand" "=v")
569 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
570 (match_operand:V4SI 2 "register_operand" "v")]
572 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
574 [(set_attr "type" "vecsimple")])
;; Unsigned saturating subtract; also sets VSCR's saturation bit.
576 (define_insn "altivec_vsubu<VI_char>s"
577 [(set (match_operand:VI 0 "register_operand" "=v")
578 (us_minus:VI (match_operand:VI 1 "register_operand" "v")
579 (match_operand:VI 2 "register_operand" "v")))
580 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
581 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
582 "vsubu<VI_char>s %0,%1,%2"
583 [(set_attr "type" "vecsimple")])
;; Signed saturating subtract; also sets VSCR's saturation bit.
585 (define_insn "altivec_vsubs<VI_char>s"
586 [(set (match_operand:VI 0 "register_operand" "=v")
587 (ss_minus:VI (match_operand:VI 1 "register_operand" "v")
588 (match_operand:VI 2 "register_operand" "v")))
589 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
590 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
591 "vsubs<VI_char>s %0,%1,%2"
592 [(set_attr "type" "vecsimple")])
;; Unsigned rounding (ceiling) average; unspec code and condition
;; (original lines 599-600) elided in this listing.
595 (define_insn "uavg<mode>3_ceil"
596 [(set (match_operand:VI 0 "register_operand" "=v")
597 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
598 (match_operand:VI 2 "register_operand" "v")]
601 "vavgu<VI_char> %0,%1,%2"
602 [(set_attr "type" "vecsimple")])
;; Signed rounding average; unspec code (original 608) elided.
604 (define_insn "avg<mode>3_ceil"
605 [(set (match_operand:VI 0 "register_operand" "=v")
606 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
607 (match_operand:VI 2 "register_operand" "v")]
609 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
610 "vavgs<VI_char> %0,%1,%2"
611 [(set_attr "type" "vecsimple")])
;; Vector compare patterns.  NOTE(review): most asm templates and some
;; conditions are elided from this listing (e.g. original lines 617, 619,
;; 626-627, 634, 642, 650-651, 658, 666-667, 675, 683, 691).
;; Bounds compare of two V4SF vectors, result in V4SI (unspec).
613 (define_insn "altivec_vcmpbfp"
614 [(set (match_operand:V4SI 0 "register_operand" "=v")
615 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
616 (match_operand:V4SF 2 "register_operand" "v")]
618 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
620 [(set_attr "type" "veccmp")])
;; Quad-word equality compare.
622 (define_insn "altivec_eqv1ti"
623 [(set (match_operand:V1TI 0 "altivec_register_operand" "=v")
624 (eq:V1TI (match_operand:V1TI 1 "altivec_register_operand" "v")
625 (match_operand:V1TI 2 "altivec_register_operand" "v")))]
628 [(set_attr "type" "veccmpfx")])
;; Element-wise integer equality (vcmpequb/h/w/d).
630 (define_insn "altivec_eq<mode>"
631 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
632 (eq:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
633 (match_operand:VI2 2 "altivec_register_operand" "v")))]
635 "vcmpequ<VI_char> %0,%1,%2"
636 [(set_attr "type" "veccmpfx")])
;; Signed greater-than compare.
638 (define_insn "*altivec_gt<mode>"
639 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
640 (gt:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
641 (match_operand:VI2 2 "altivec_register_operand" "v")))]
643 "vcmpgts<VI_char> %0,%1,%2"
644 [(set_attr "type" "veccmpfx")])
;; Signed quad-word greater-than compare.
646 (define_insn "*altivec_gtv1ti"
647 [(set (match_operand:V1TI 0 "altivec_register_operand" "=v")
648 (gt:V1TI (match_operand:V1TI 1 "altivec_register_operand" "v")
649 (match_operand:V1TI 2 "altivec_register_operand" "v")))]
652 [(set_attr "type" "veccmpfx")])
;; Unsigned greater-than compare.
654 (define_insn "*altivec_gtu<mode>"
655 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
656 (gtu:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
657 (match_operand:VI2 2 "altivec_register_operand" "v")))]
659 "vcmpgtu<VI_char> %0,%1,%2"
660 [(set_attr "type" "veccmpfx")])
;; Unsigned quad-word greater-than compare.
662 (define_insn "*altivec_gtuv1ti"
663 [(set (match_operand:V1TI 0 "altivec_register_operand" "=v")
664 (gtu:V1TI (match_operand:V1TI 1 "altivec_register_operand" "v")
665 (match_operand:V1TI 2 "altivec_register_operand" "v")))]
668 [(set_attr "type" "veccmpfx")])
;; Single-precision FP equality / greater-than / greater-or-equal.
670 (define_insn "*altivec_eqv4sf"
671 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
672 (eq:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
673 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
674 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
676 [(set_attr "type" "veccmp")])
678 (define_insn "*altivec_gtv4sf"
679 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
680 (gt:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
681 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
682 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
684 [(set_attr "type" "veccmp")])
686 (define_insn "*altivec_gev4sf"
687 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
688 (ge:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
689 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
690 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
692 [(set_attr "type" "veccmp")])
;; Four canonicalizations of vector select (vsel/xxsel): the RTL
;; (ior (and (not m) a) (and m b)) appears with the mask and operands in
;; different orders, so each form gets its own pattern.  NOTE(review):
;; several interior RTL lines and the vsel asm alternative are elided in
;; this listing (e.g. original lines 696-697, 700-701, 704, 706).
694 (define_insn "altivec_vsel<mode>"
695 [(set (match_operand:VM 0 "register_operand" "=wa,v")
698 (not:VM (match_operand:VM 3 "register_operand" "wa,v"))
699 (match_operand:VM 1 "register_operand" "wa,v"))
702 (match_operand:VM 2 "register_operand" "wa,v"))))]
703 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode)"
705 xxsel %x0,%x1,%x2,%x3
707 [(set_attr "type" "vecmove")
708 (set_attr "isa" "<VSisa>")])
710 (define_insn "altivec_vsel<mode>2"
711 [(set (match_operand:VM 0 "register_operand" "=wa,v")
714 (not:VM (match_operand:VM 3 "register_operand" "wa,v"))
715 (match_operand:VM 1 "register_operand" "wa,v"))
717 (match_operand:VM 2 "register_operand" "wa,v")
719 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode)"
721 xxsel %x0,%x1,%x2,%x3
723 [(set_attr "type" "vecmove")
724 (set_attr "isa" "<VSisa>")])
;; Forms 3 and 4: mask is un-negated on the first arm, so operands 1 and
;; 2 swap in the emitted xxsel.
726 (define_insn "altivec_vsel<mode>3"
727 [(set (match_operand:VM 0 "register_operand" "=wa,v")
730 (match_operand:VM 3 "register_operand" "wa,v")
731 (match_operand:VM 1 "register_operand" "wa,v"))
733 (not:VM (match_dup 3))
734 (match_operand:VM 2 "register_operand" "wa,v"))))]
735 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode)"
737 xxsel %x0,%x2,%x1,%x3
739 [(set_attr "type" "vecmove")
740 (set_attr "isa" "<VSisa>")])
742 (define_insn "altivec_vsel<mode>4"
743 [(set (match_operand:VM 0 "register_operand" "=wa,v")
746 (match_operand:VM 1 "register_operand" "wa,v")
747 (match_operand:VM 3 "register_operand" "wa,v"))
749 (not:VM (match_dup 3))
750 (match_operand:VM 2 "register_operand" "wa,v"))))]
751 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode)"
753 xxsel %x0,%x2,%x1,%x3
755 [(set_attr "type" "vecmove")
756 (set_attr "isa" "<VSisa>")])
758 ;; Fused multiply add.
;; vmaddfp: dst = op1 * op2 + op3, single fused rounding.
760 (define_insn "*altivec_fmav4sf4"
761 [(set (match_operand:V4SF 0 "register_operand" "=v")
762 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
763 (match_operand:V4SF 2 "register_operand" "v")
764 (match_operand:V4SF 3 "register_operand" "v")))]
765 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
766 "vmaddfp %0,%1,%2,%3"
767 [(set_attr "type" "vecfloat")])
769 ;; We do multiply as a fused multiply-add with an add of a -0.0 vector.
;; -0.0 (not +0.0) is used so that x*y+(-0.0) == x*y even for signed
;; zeros.  The -0.0 splat is built by shifting an all-ones word vector
;; left by 31 (vspltisw -1 then vslw).  NOTE(review): parts of the
;; expander (original lines 775, 777-779, 784, 786) are elided here.
771 (define_expand "altivec_mulv4sf3"
772 [(set (match_operand:V4SF 0 "register_operand")
773 (fma:V4SF (match_operand:V4SF 1 "register_operand")
774 (match_operand:V4SF 2 "register_operand")
776 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
780 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
781 neg0 = gen_reg_rtx (V4SImode);
782 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
783 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
785 operands[3] = gen_lowpart (V4SFmode, neg0);
788 ;; 32-bit integer multiplication
789 ;; A_high = Operand_0 & 0xFFFF0000 >> 16
790 ;; A_low = Operand_0 & 0xFFFF
791 ;; B_high = Operand_1 & 0xFFFF0000 >> 16
792 ;; B_low = Operand_1 & 0xFFFF
793 ;; result = A_low * B_low + (A_high * B_low + B_high * A_low) << 16
795 ;; (define_insn "mulv4si3"
796 ;; [(set (match_operand:V4SI 0 "register_operand" "=v")
797 ;; (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
798 ;; (match_operand:V4SI 2 "register_operand" "v")))]
;; Power8 single-instruction V4SI multiply; condition and asm template
;; (original lines 803-804) elided in this listing.
799 (define_insn "mulv4si3_p8"
800 [(set (match_operand:V4SI 0 "register_operand" "=v")
801 (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
802 (match_operand:V4SI 2 "register_operand" "v")))]
805 [(set_attr "type" "veccomplex")])
;; Generic V4SI multiply: use the P8 insn if available, otherwise build
;; the product from 16-bit partial products per the comment above
;; (vmulouh for the low halves, vmsumuhm for the cross terms, shifted
;; left 16 and added).  NOTE(review): several lines of the expander
;; (e.g. original 811-821, 823, 825-827) are elided in this listing.
807 (define_expand "mulv4si3"
808 [(use (match_operand:V4SI 0 "register_operand"))
809 (use (match_operand:V4SI 1 "register_operand"))
810 (use (match_operand:V4SI 2 "register_operand"))]
822 if (TARGET_P8_VECTOR)
824 emit_insn (gen_mulv4si3_p8 (operands[0], operands[1], operands[2]));
828 zero = gen_reg_rtx (V4SImode);
829 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
831 sixteen = gen_reg_rtx (V4SImode);
832 emit_insn (gen_altivec_vspltisw (sixteen, gen_rtx_CONST_INT (V4SImode, -16)));
834 swap = gen_reg_rtx (V4SImode);
835 emit_insn (gen_vrotlv4si3 (swap, operands[2], sixteen));
837 one = gen_reg_rtx (V8HImode);
838 convert_move (one, operands[1], 0);
840 two = gen_reg_rtx (V8HImode);
841 convert_move (two, operands[2], 0);
843 small_swap = gen_reg_rtx (V8HImode);
844 convert_move (small_swap, swap, 0);
846 low_product = gen_reg_rtx (V4SImode);
847 emit_insn (gen_altivec_vmulouh (low_product, one, two));
849 high_product = gen_reg_rtx (V4SImode);
850 emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero));
852 emit_insn (gen_vashlv4si3 (high_product, high_product, sixteen));
854 emit_insn (gen_addv4si3 (operands[0], high_product, low_product))
;; V8HI multiply via vmladduhm (fmav8hi4) with a zero addend.
859 (define_expand "mulv8hi3"
860 [(use (match_operand:V8HI 0 "register_operand"))
861 (use (match_operand:V8HI 1 "register_operand"))
862 (use (match_operand:V8HI 2 "register_operand"))]
865 rtx zero = gen_reg_rtx (V8HImode);
867 emit_insn (gen_altivec_vspltish (zero, const0_rtx));
868 emit_insn (gen_fmav8hi4 (operands[0], operands[1], operands[2], zero));
873 ;; Map UNSPEC_SLDB to "l" and UNSPEC_SRDB to "r".
874 (define_int_attr SLDB_lr [(UNSPEC_SLDB "l")
877 (define_int_iterator VSHIFT_DBL_LR [UNSPEC_SLDB UNSPEC_SRDB])
;; vsldbi/vsrdbi: shift a double vector (op1:op2) left/right by a 0-7
;; bit count given in operand 3.  NOTE(review): the unspec reference and
;; condition (original lines 884-885) are elided in this listing.
879 (define_insn "vs<SLDB_lr>db_<mode>"
880 [(set (match_operand:VI2 0 "register_operand" "=v")
881 (unspec:VI2 [(match_operand:VI2 1 "register_operand" "v")
882 (match_operand:VI2 2 "register_operand" "v")
883 (match_operand:QI 3 "const_0_to_12_operand" "n")]
886 "vs<SLDB_lr>dbi %0,%1,%2,%3"
887 [(set_attr "type" "vecsimple")])
;; vec_strir / vec_stril (string isolate right/left) patterns.  Each
;; expander dispatches on endianness: on little-endian the opposite
;; "direct" pattern is used so the semantics match the user-level
;; builtin.  NOTE(review): unspec codes, conditions and asm templates of
;; the direct patterns are elided throughout this listing.
889 (define_expand "vstrir_<mode>"
890 [(set (match_operand:VIshort 0 "altivec_register_operand")
891 (unspec:VIshort [(match_operand:VIshort 1 "altivec_register_operand")]
895 if (BYTES_BIG_ENDIAN)
896 emit_insn (gen_vstrir_direct_<mode> (operands[0], operands[1]));
898 emit_insn (gen_vstril_direct_<mode> (operands[0], operands[1]));
902 (define_insn "vstrir_direct_<mode>"
903 [(set (match_operand:VIshort 0 "altivec_register_operand" "=v")
905 [(match_operand:VIshort 1 "altivec_register_operand" "v")]
909 [(set_attr "type" "vecsimple")])
911 ;; This expands into same code as vstrir<mode> followed by condition logic
912 ;; so that a single vstribr. or vstrihr. or vstribl. or vstrihl. instruction
913 ;; can, for example, satisfy the needs of a vec_strir () function paired
914 ;; with a vec_strir_p () function if both take the same incoming arguments.
915 (define_expand "vstrir_p_<mode>"
916 [(match_operand:SI 0 "gpc_reg_operand")
917 (match_operand:VIshort 1 "altivec_register_operand")]
920 rtx scratch = gen_reg_rtx (<MODE>mode);
921 if (BYTES_BIG_ENDIAN)
922 emit_insn (gen_vstrir_p_direct_<mode> (scratch, operands[1]));
924 emit_insn (gen_vstril_p_direct_<mode> (scratch, operands[1]));
925 emit_insn (gen_cr6_test_for_zero (operands[0]));
;; Record (dot-form) variant: also sets CR6 for the predicate test.
929 (define_insn "vstrir_p_direct_<mode>"
930 [(set (match_operand:VIshort 0 "altivec_register_operand" "=v")
932 [(match_operand:VIshort 1 "altivec_register_operand" "v")]
934 (set (reg:CC CR6_REGNO)
935 (unspec:CC [(match_dup 1)]
939 [(set_attr "type" "vecsimple")])
941 (define_expand "vstril_<mode>"
942 [(set (match_operand:VIshort 0 "altivec_register_operand")
943 (unspec:VIshort [(match_operand:VIshort 1 "altivec_register_operand")]
947 if (BYTES_BIG_ENDIAN)
948 emit_insn (gen_vstril_direct_<mode> (operands[0], operands[1]));
950 emit_insn (gen_vstrir_direct_<mode> (operands[0], operands[1]));
954 (define_insn "vstril_direct_<mode>"
955 [(set (match_operand:VIshort 0 "altivec_register_operand" "=v")
957 [(match_operand:VIshort 1 "altivec_register_operand" "v")]
961 [(set_attr "type" "vecsimple")])
963 ;; This expands into same code as vstril_<mode> followed by condition logic
964 ;; so that a single vstribr. or vstrihr. or vstribl. or vstrihl. instruction
965 ;; can, for example, satisfy the needs of a vec_stril () function paired
966 ;; with a vec_stril_p () function if both take the same incoming arguments.
967 (define_expand "vstril_p_<mode>"
968 [(match_operand:SI 0 "gpc_reg_operand")
969 (match_operand:VIshort 1 "altivec_register_operand")]
972 rtx scratch = gen_reg_rtx (<MODE>mode);
973 if (BYTES_BIG_ENDIAN)
974 emit_insn (gen_vstril_p_direct_<mode> (scratch, operands[1]));
976 emit_insn (gen_vstrir_p_direct_<mode> (scratch, operands[1]));
977 emit_insn (gen_cr6_test_for_zero (operands[0]));
;; Record (dot-form) variant: also sets CR6 for the predicate test.
981 (define_insn "vstril_p_direct_<mode>"
982 [(set (match_operand:VIshort 0 "altivec_register_operand" "=v")
984 [(match_operand:VIshort 1 "altivec_register_operand" "v")]
986 (set (reg:CC CR6_REGNO)
987 (unspec:CC [(match_dup 1)]
991 [(set_attr "type" "vecsimple")])
993 ;; Fused multiply subtract
;; vnmsubfp: negated fused multiply-subtract.  NOTE(review): the neg/
;; wrapper RTL lines (original 996, 999) are elided in this listing.
994 (define_insn "*altivec_vnmsubfp"
995 [(set (match_operand:V4SF 0 "register_operand" "=v")
997 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
998 (match_operand:V4SF 2 "register_operand" "v")
1000 (match_operand:V4SF 3 "register_operand" "v")))))]
1001 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1002 "vnmsubfp %0,%1,%2,%3"
1003 [(set_attr "type" "vecfloat")])
;; Multiply-sum patterns (vmsum*): multiply narrow elements of ops 1/2,
;; sum the partial products into the wider elements of op 3.
;; NOTE(review): each pattern's unspec code and/or condition lines are
;; elided in this listing (e.g. original 1010-1011, 1020-1021, 1030-1031,
;; 1040-1041, 1050, 1052, 1061, 1063).
;; Unsigned modular multiply-sum (vmsumubm/vmsumuhm).
1005 (define_insn "altivec_vmsumu<VI_char>m"
1006 [(set (match_operand:V4SI 0 "register_operand" "=v")
1007 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1008 (match_operand:VIshort 2 "register_operand" "v")
1009 (match_operand:V4SI 3 "register_operand" "v")]
1012 "vmsumu<VI_char>m %0,%1,%2,%3"
1013 [(set_attr "type" "veccomplex")])
;; Unsigned doubleword multiply-sum into a quad word.
1015 (define_insn "altivec_vmsumudm"
1016 [(set (match_operand:V1TI 0 "register_operand" "=v")
1017 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1018 (match_operand:V2DI 2 "register_operand" "v")
1019 (match_operand:V1TI 3 "register_operand" "v")]
1022 "vmsumudm %0,%1,%2,%3"
1023 [(set_attr "type" "veccomplex")])
;; Mixed-sign modular multiply-sum (vmsummbm).
1025 (define_insn "altivec_vmsumm<VI_char>m"
1026 [(set (match_operand:V4SI 0 "register_operand" "=v")
1027 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1028 (match_operand:VIshort 2 "register_operand" "v")
1029 (match_operand:V4SI 3 "register_operand" "v")]
1032 "vmsumm<VI_char>m %0,%1,%2,%3"
1033 [(set_attr "type" "veccomplex")])
;; Signed halfword modular multiply-sum.
1035 (define_insn "altivec_vmsumshm"
1036 [(set (match_operand:V4SI 0 "register_operand" "=v")
1037 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1038 (match_operand:V8HI 2 "register_operand" "v")
1039 (match_operand:V4SI 3 "register_operand" "v")]
1042 "vmsumshm %0,%1,%2,%3"
1043 [(set_attr "type" "veccomplex")])
;; Saturating variants also set VSCR's saturation bit.
1045 (define_insn "altivec_vmsumuhs"
1046 [(set (match_operand:V4SI 0 "register_operand" "=v")
1047 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1048 (match_operand:V8HI 2 "register_operand" "v")
1049 (match_operand:V4SI 3 "register_operand" "v")]
1051 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1053 "vmsumuhs %0,%1,%2,%3"
1054 [(set_attr "type" "veccomplex")])
1056 (define_insn "altivec_vmsumshs"
1057 [(set (match_operand:V4SI 0 "register_operand" "=v")
1058 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1059 (match_operand:V8HI 2 "register_operand" "v")
1060 (match_operand:V4SI 3 "register_operand" "v")]
1062 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1064 "vmsumshs %0,%1,%2,%3"
1065 [(set_attr "type" "veccomplex")])
;; Element-wise min/max.  NOTE(review): condition lines (original 1073,
;; 1081, 1097, 1105) and the V4SF asm templates (1090, 1114) are elided
;; in this listing.
1069 (define_insn "umax<mode>3"
1070 [(set (match_operand:VI2 0 "register_operand" "=v")
1071 (umax:VI2 (match_operand:VI2 1 "register_operand" "v")
1072 (match_operand:VI2 2 "register_operand" "v")))]
1074 "vmaxu<VI_char> %0,%1,%2"
1075 [(set_attr "type" "vecsimple")])
1077 (define_insn "smax<mode>3"
1078 [(set (match_operand:VI2 0 "register_operand" "=v")
1079 (smax:VI2 (match_operand:VI2 1 "register_operand" "v")
1080 (match_operand:VI2 2 "register_operand" "v")))]
1082 "vmaxs<VI_char> %0,%1,%2"
1083 [(set_attr "type" "vecsimple")])
1085 (define_insn "*altivec_smaxv4sf3"
1086 [(set (match_operand:V4SF 0 "register_operand" "=v")
1087 (smax:V4SF (match_operand:V4SF 1 "register_operand" "v")
1088 (match_operand:V4SF 2 "register_operand" "v")))]
1089 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1091 [(set_attr "type" "veccmp")])
1093 (define_insn "umin<mode>3"
1094 [(set (match_operand:VI2 0 "register_operand" "=v")
1095 (umin:VI2 (match_operand:VI2 1 "register_operand" "v")
1096 (match_operand:VI2 2 "register_operand" "v")))]
1098 "vminu<VI_char> %0,%1,%2"
1099 [(set_attr "type" "vecsimple")])
1101 (define_insn "smin<mode>3"
1102 [(set (match_operand:VI2 0 "register_operand" "=v")
1103 (smin:VI2 (match_operand:VI2 1 "register_operand" "v")
1104 (match_operand:VI2 2 "register_operand" "v")))]
1106 "vmins<VI_char> %0,%1,%2"
1107 [(set_attr "type" "vecsimple")])
1109 (define_insn "*altivec_sminv4sf3"
1110 [(set (match_operand:V4SF 0 "register_operand" "=v")
1111 (smin:V4SF (match_operand:V4SF 1 "register_operand" "v")
1112 (match_operand:V4SF 2 "register_operand" "v")))]
1113 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1115 [(set_attr "type" "veccmp")])
;; Multiply-high-and-add saturating patterns; both set VSCR's saturation
;; bit.  NOTE(review): unspec codes and conditions (original 1122, 1124,
;; 1133, 1135, 1144) are elided in this listing.
1117 (define_insn "altivec_vmhaddshs"
1118 [(set (match_operand:V8HI 0 "register_operand" "=v")
1119 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1120 (match_operand:V8HI 2 "register_operand" "v")
1121 (match_operand:V8HI 3 "register_operand" "v")]
1123 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1125 "vmhaddshs %0,%1,%2,%3"
1126 [(set_attr "type" "veccomplex")])
;; Rounding variant of the above.
1128 (define_insn "altivec_vmhraddshs"
1129 [(set (match_operand:V8HI 0 "register_operand" "=v")
1130 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1131 (match_operand:V8HI 2 "register_operand" "v")
1132 (match_operand:V8HI 3 "register_operand" "v")]
1134 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1136 "vmhraddshs %0,%1,%2,%3"
1137 [(set_attr "type" "veccomplex")])
;; vmladduhm: halfword multiply-low then add, expressed as plus/mult RTL
;; so the optimizers can see it (used by the mulv8hi3 expander above).
1139 (define_insn "fmav8hi4"
1140 [(set (match_operand:V8HI 0 "register_operand" "=v")
1141 (plus:V8HI (mult:V8HI (match_operand:V8HI 1 "register_operand" "v")
1142 (match_operand:V8HI 2 "register_operand" "v"))
1143 (match_operand:V8HI 3 "register_operand" "v")))]
1145 "vmladduhm %0,%1,%2,%3"
1146 [(set_attr "type" "veccomplex")])
;; Merge-high of two V16QI vectors.  On big-endian this maps directly to
;; vmrghb_direct_be; on little-endian the same result is produced by the
;; merge-low LE pattern with the two inputs swapped.
1148 (define_expand "altivec_vmrghb"
1149 [(use (match_operand:V16QI 0 "register_operand"))
1150 (use (match_operand:V16QI 1 "register_operand"))
1151 (use (match_operand:V16QI 2 "register_operand"))]
1154 if (BYTES_BIG_ENDIAN)
1156 gen_altivec_vmrghb_direct_be (operands[0], operands[1], operands[2]));
1159 gen_altivec_vmrglb_direct_le (operands[0], operands[2], operands[1]));

;; BE direct form: interleave bytes 0..7 of op1 with bytes 0..7 of op2
;; (indices 16..23 select op2's elements within the concatenation).
1163 (define_insn "altivec_vmrghb_direct_be"
1164 [(set (match_operand:V16QI 0 "register_operand" "=v")
1167 (match_operand:V16QI 1 "register_operand" "v")
1168 (match_operand:V16QI 2 "register_operand" "v"))
1169 (parallel [(const_int 0) (const_int 16)
1170 (const_int 1) (const_int 17)
1171 (const_int 2) (const_int 18)
1172 (const_int 3) (const_int 19)
1173 (const_int 4) (const_int 20)
1174 (const_int 5) (const_int 21)
1175 (const_int 6) (const_int 22)
1176 (const_int 7) (const_int 23)])))]
1177 "TARGET_ALTIVEC && BYTES_BIG_ENDIAN"
1179 [(set_attr "type" "vecperm")])

;; LE direct form: the operands appear swapped in the concatenation and
;; the upper byte indices (8..15 paired with 24..31) are selected.
1181 (define_insn "altivec_vmrghb_direct_le"
1182 [(set (match_operand:V16QI 0 "register_operand" "=v")
1185 (match_operand:V16QI 2 "register_operand" "v")
1186 (match_operand:V16QI 1 "register_operand" "v"))
1187 (parallel [(const_int 8) (const_int 24)
1188 (const_int 9) (const_int 25)
1189 (const_int 10) (const_int 26)
1190 (const_int 11) (const_int 27)
1191 (const_int 12) (const_int 28)
1192 (const_int 13) (const_int 29)
1193 (const_int 14) (const_int 30)
1194 (const_int 15) (const_int 31)])))]
1195 "TARGET_ALTIVEC && !BYTES_BIG_ENDIAN"
1197 [(set_attr "type" "vecperm")])
;; Merge-high of two V8HI vectors; the little-endian path emits the LE
;; merge-low pattern with the two inputs swapped.
1199 (define_expand "altivec_vmrghh"
1200 [(use (match_operand:V8HI 0 "register_operand"))
1201 (use (match_operand:V8HI 1 "register_operand"))
1202 (use (match_operand:V8HI 2 "register_operand"))]
1205 if (BYTES_BIG_ENDIAN)
1207 gen_altivec_vmrghh_direct_be (operands[0], operands[1], operands[2]));
1210 gen_altivec_vmrglh_direct_le (operands[0], operands[2], operands[1]));

;; BE direct form: interleave halfwords 0..3 of op1 with those of op2.
1214 (define_insn "altivec_vmrghh_direct_be"
1215 [(set (match_operand:V8HI 0 "register_operand" "=v")
1218 (match_operand:V8HI 1 "register_operand" "v")
1219 (match_operand:V8HI 2 "register_operand" "v"))
1220 (parallel [(const_int 0) (const_int 8)
1221 (const_int 1) (const_int 9)
1222 (const_int 2) (const_int 10)
1223 (const_int 3) (const_int 11)])))]
1224 "TARGET_ALTIVEC && BYTES_BIG_ENDIAN"
1226 [(set_attr "type" "vecperm")])

;; LE direct form: swapped operands, upper halfword indices selected.
1228 (define_insn "altivec_vmrghh_direct_le"
1229 [(set (match_operand:V8HI 0 "register_operand" "=v")
1232 (match_operand:V8HI 2 "register_operand" "v")
1233 (match_operand:V8HI 1 "register_operand" "v"))
1234 (parallel [(const_int 4) (const_int 12)
1235 (const_int 5) (const_int 13)
1236 (const_int 6) (const_int 14)
1237 (const_int 7) (const_int 15)])))]
1238 "TARGET_ALTIVEC && !BYTES_BIG_ENDIAN"
1240 [(set_attr "type" "vecperm")])
;; Merge-high of two V4SI vectors; the little-endian path emits the LE
;; merge-low direct pattern with swapped inputs.
1242 (define_expand "altivec_vmrghw"
1243 [(use (match_operand:V4SI 0 "register_operand"))
1244 (use (match_operand:V4SI 1 "register_operand"))
1245 (use (match_operand:V4SI 2 "register_operand"))]
1246 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1248 if (BYTES_BIG_ENDIAN)
1249 emit_insn (gen_altivec_vmrghw_direct_v4si_be (operands[0],
1253 emit_insn (gen_altivec_vmrglw_direct_v4si_le (operands[0],

;; BE direct form over VSX_W (word-sized vector modes); both VSX ("wa")
;; and AltiVec ("v") register alternatives are allowed.
1259 (define_insn "altivec_vmrghw_direct_<mode>_be"
1260 [(set (match_operand:VSX_W 0 "register_operand" "=wa,v")
1262 (vec_concat:<VS_double>
1263 (match_operand:VSX_W 1 "register_operand" "wa,v")
1264 (match_operand:VSX_W 2 "register_operand" "wa,v"))
1265 (parallel [(const_int 0) (const_int 4)
1266 (const_int 1) (const_int 5)])))]
1267 "TARGET_ALTIVEC && BYTES_BIG_ENDIAN"
1271 [(set_attr "type" "vecperm")])

;; LE direct form: swapped operands, upper word indices selected.
1273 (define_insn "altivec_vmrghw_direct_<mode>_le"
1274 [(set (match_operand:VSX_W 0 "register_operand" "=wa,v")
1276 (vec_concat:<VS_double>
1277 (match_operand:VSX_W 2 "register_operand" "wa,v")
1278 (match_operand:VSX_W 1 "register_operand" "wa,v"))
1279 (parallel [(const_int 2) (const_int 6)
1280 (const_int 3) (const_int 7)])))]
1281 "TARGET_ALTIVEC && !BYTES_BIG_ENDIAN"
1285 [(set_attr "type" "vecperm")])
;; V4SF merge-high.  The output template chooses the endianness-correct
;; machine instruction: vmrghw on BE, vmrglw with swapped operands on LE.
1287 (define_insn "*altivec_vmrghsf"
1288 [(set (match_operand:V4SF 0 "register_operand" "=v")
1291 (match_operand:V4SF 1 "register_operand" "v")
1292 (match_operand:V4SF 2 "register_operand" "v"))
1293 (parallel [(const_int 0) (const_int 4)
1294 (const_int 1) (const_int 5)])))]
1295 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1297 if (BYTES_BIG_ENDIAN)
1298 return "vmrghw %0,%1,%2";
1300 return "vmrglw %0,%2,%1";
1302 [(set_attr "type" "vecperm")])
;; Merge-low of two V16QI vectors.  On little-endian the merge-high LE
;; pattern with swapped inputs yields the same result.
1304 (define_expand "altivec_vmrglb"
1305 [(use (match_operand:V16QI 0 "register_operand"))
1306 (use (match_operand:V16QI 1 "register_operand"))
1307 (use (match_operand:V16QI 2 "register_operand"))]
1310 if (BYTES_BIG_ENDIAN)
1312 gen_altivec_vmrglb_direct_be (operands[0], operands[1], operands[2]));
1315 gen_altivec_vmrghb_direct_le (operands[0], operands[2], operands[1]));

;; BE direct form: interleave bytes 8..15 of op1 with bytes 8..15 of op2
;; (indices 24..31 select op2's elements in the concatenation).
1319 (define_insn "altivec_vmrglb_direct_be"
1320 [(set (match_operand:V16QI 0 "register_operand" "=v")
1323 (match_operand:V16QI 1 "register_operand" "v")
1324 (match_operand:V16QI 2 "register_operand" "v"))
1325 (parallel [(const_int 8) (const_int 24)
1326 (const_int 9) (const_int 25)
1327 (const_int 10) (const_int 26)
1328 (const_int 11) (const_int 27)
1329 (const_int 12) (const_int 28)
1330 (const_int 13) (const_int 29)
1331 (const_int 14) (const_int 30)
1332 (const_int 15) (const_int 31)])))]
1333 "TARGET_ALTIVEC && BYTES_BIG_ENDIAN"
1335 [(set_attr "type" "vecperm")])

;; LE direct form: swapped operands, lower byte indices (0..7 paired
;; with 16..23) selected.
1337 (define_insn "altivec_vmrglb_direct_le"
1338 [(set (match_operand:V16QI 0 "register_operand" "=v")
1341 (match_operand:V16QI 2 "register_operand" "v")
1342 (match_operand:V16QI 1 "register_operand" "v"))
1343 (parallel [(const_int 0) (const_int 16)
1344 (const_int 1) (const_int 17)
1345 (const_int 2) (const_int 18)
1346 (const_int 3) (const_int 19)
1347 (const_int 4) (const_int 20)
1348 (const_int 5) (const_int 21)
1349 (const_int 6) (const_int 22)
1350 (const_int 7) (const_int 23)])))]
1351 "TARGET_ALTIVEC && !BYTES_BIG_ENDIAN"
1353 [(set_attr "type" "vecperm")])
;; Merge-low of two V8HI vectors; little-endian uses the merge-high LE
;; pattern with swapped inputs.
1355 (define_expand "altivec_vmrglh"
1356 [(use (match_operand:V8HI 0 "register_operand"))
1357 (use (match_operand:V8HI 1 "register_operand"))
1358 (use (match_operand:V8HI 2 "register_operand"))]
1361 if (BYTES_BIG_ENDIAN)
1363 gen_altivec_vmrglh_direct_be (operands[0], operands[1], operands[2]));
1366 gen_altivec_vmrghh_direct_le (operands[0], operands[2], operands[1]));

;; BE direct form: interleave halfwords 4..7 of op1 with those of op2.
1370 (define_insn "altivec_vmrglh_direct_be"
1371 [(set (match_operand:V8HI 0 "register_operand" "=v")
1374 (match_operand:V8HI 1 "register_operand" "v")
1375 (match_operand:V8HI 2 "register_operand" "v"))
1376 (parallel [(const_int 4) (const_int 12)
1377 (const_int 5) (const_int 13)
1378 (const_int 6) (const_int 14)
1379 (const_int 7) (const_int 15)])))]
1380 "TARGET_ALTIVEC && BYTES_BIG_ENDIAN"
1382 [(set_attr "type" "vecperm")])

;; LE direct form: swapped operands, lower halfword indices selected.
1384 (define_insn "altivec_vmrglh_direct_le"
1385 [(set (match_operand:V8HI 0 "register_operand" "=v")
1388 (match_operand:V8HI 2 "register_operand" "v")
1389 (match_operand:V8HI 1 "register_operand" "v"))
1390 (parallel [(const_int 0) (const_int 8)
1391 (const_int 1) (const_int 9)
1392 (const_int 2) (const_int 10)
1393 (const_int 3) (const_int 11)])))]
1394 "TARGET_ALTIVEC && !BYTES_BIG_ENDIAN"
1396 [(set_attr "type" "vecperm")])
;; Merge-low of two V4SI vectors; little-endian uses the merge-high LE
;; direct pattern with swapped inputs.
1398 (define_expand "altivec_vmrglw"
1399 [(use (match_operand:V4SI 0 "register_operand"))
1400 (use (match_operand:V4SI 1 "register_operand"))
1401 (use (match_operand:V4SI 2 "register_operand"))]
1402 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1404 if (BYTES_BIG_ENDIAN)
1405 emit_insn (gen_altivec_vmrglw_direct_v4si_be (operands[0],
1409 emit_insn (gen_altivec_vmrghw_direct_v4si_le (operands[0],

;; BE direct form over word-sized VSX_W modes (VSX or AltiVec regs).
1415 (define_insn "altivec_vmrglw_direct_<mode>_be"
1416 [(set (match_operand:VSX_W 0 "register_operand" "=wa,v")
1418 (vec_concat:<VS_double>
1419 (match_operand:VSX_W 1 "register_operand" "wa,v")
1420 (match_operand:VSX_W 2 "register_operand" "wa,v"))
1421 (parallel [(const_int 2) (const_int 6)
1422 (const_int 3) (const_int 7)])))]
1423 "TARGET_ALTIVEC && BYTES_BIG_ENDIAN"
1427 [(set_attr "type" "vecperm")])

;; LE direct form: swapped operands, lower word indices selected.
1429 (define_insn "altivec_vmrglw_direct_<mode>_le"
1430 [(set (match_operand:VSX_W 0 "register_operand" "=wa,v")
1432 (vec_concat:<VS_double>
1433 (match_operand:VSX_W 2 "register_operand" "wa,v")
1434 (match_operand:VSX_W 1 "register_operand" "wa,v"))
1435 (parallel [(const_int 0) (const_int 4)
1436 (const_int 1) (const_int 5)])))]
1437 "TARGET_ALTIVEC && !BYTES_BIG_ENDIAN"
1441 [(set_attr "type" "vecperm")])
;; V4SF merge-low.  The output template chooses the endianness-correct
;; machine instruction: vmrglw on BE, vmrghw with swapped operands on LE.
1443 (define_insn "*altivec_vmrglsf"
1444 [(set (match_operand:V4SF 0 "register_operand" "=v")
1447 (match_operand:V4SF 1 "register_operand" "v")
1448 (match_operand:V4SF 2 "register_operand" "v"))
1449 (parallel [(const_int 2) (const_int 6)
1450 (const_int 3) (const_int 7)])))]
1451 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1453 if (BYTES_BIG_ENDIAN)
1454 return "vmrglw %0,%1,%2";
1456 return "vmrghw %0,%2,%1";
1458 [(set_attr "type" "vecperm")])
1460 ;; Power8 vector merge-even for V2DF/V2DI: select element 0 of each input.
1461 (define_expand "p8_vmrgew_<mode>"
1462 [(use (match_operand:VSX_D 0 "vsx_register_operand"))
1463 (use (match_operand:VSX_D 1 "vsx_register_operand"))
1464 (use (match_operand:VSX_D 2 "vsx_register_operand"))]
1465 "VECTOR_MEM_VSX_P (<MODE>mode)"
;; Build the RTL directly: concatenate the inputs and select the even
;; elements (indices 0 and 2 of the doubled vector).
1470 v = gen_rtvec (2, GEN_INT (0), GEN_INT (2));
1471 x = gen_rtx_VEC_CONCAT (<VS_double>mode, operands[1], operands[2]);
1473 x = gen_rtx_VEC_SELECT (<MODE>mode, x, gen_rtx_PARALLEL (VOIDmode, v));
1474 emit_insn (gen_rtx_SET (operands[0], x));

1478 ;; Power8 vector merge-even for V4SF/V4SI word vectors.
1479 (define_insn "p8_vmrgew_<mode>"
1480 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1482 (vec_concat:<VS_double>
1483 (match_operand:VSX_W 1 "register_operand" "v")
1484 (match_operand:VSX_W 2 "register_operand" "v"))
1485 (parallel [(const_int 0) (const_int 4)
1486 (const_int 2) (const_int 6)])))]
;; On LE the even-merge is implemented with the odd-merge instruction
;; and swapped operands (element numbering is reversed).
1489 if (BYTES_BIG_ENDIAN)
1490 return "vmrgew %0,%1,%2";
1492 return "vmrgow %0,%2,%1";
1494 [(set_attr "type" "vecperm")])

;; Power8 vector merge-odd for V4SF/V4SI word vectors (dual of vmrgew).
1496 (define_insn "p8_vmrgow_<mode>"
1497 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1499 (vec_concat:<VS_double>
1500 (match_operand:VSX_W 1 "register_operand" "v")
1501 (match_operand:VSX_W 2 "register_operand" "v"))
1502 (parallel [(const_int 1) (const_int 5)
1503 (const_int 3) (const_int 7)])))]
1506 if (BYTES_BIG_ENDIAN)
1507 return "vmrgow %0,%1,%2";
1509 return "vmrgew %0,%2,%1";
1511 [(set_attr "type" "vecperm")])

;; Power8 vector merge-odd for V2DF/V2DI: select element 1 of each input.
1513 (define_expand "p8_vmrgow_<mode>"
1514 [(use (match_operand:VSX_D 0 "vsx_register_operand"))
1515 (use (match_operand:VSX_D 1 "vsx_register_operand"))
1516 (use (match_operand:VSX_D 2 "vsx_register_operand"))]
1517 "VECTOR_MEM_VSX_P (<MODE>mode)"
1522 v = gen_rtvec (2, GEN_INT (1), GEN_INT (3));
1523 x = gen_rtx_VEC_CONCAT (<VS_double>mode, operands[1], operands[2]);
1525 x = gen_rtx_VEC_SELECT (<MODE>mode, x, gen_rtx_PARALLEL (VOIDmode, v));
1526 emit_insn (gen_rtx_SET (operands[0], x));

;; "Direct" unspec forms: always emit the named instruction with the
;; operand order given, with no endian correction.
1530 (define_insn "p8_vmrgew_<mode>_direct"
1531 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1532 (unspec:VSX_W [(match_operand:VSX_W 1 "register_operand" "v")
1533 (match_operand:VSX_W 2 "register_operand" "v")]
1534 UNSPEC_VMRGEW_DIRECT))]
1537 [(set_attr "type" "vecperm")])

1539 (define_insn "p8_vmrgow_<mode>_direct"
1540 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1541 (unspec:VSX_W [(match_operand:VSX_W 1 "register_operand" "v")
1542 (match_operand:VSX_W 2 "register_operand" "v")]
1543 UNSPEC_VMRGOW_DIRECT))]
1546 [(set_attr "type" "vecperm")])
;; Widening multiply of the EVEN elements (signed/unsigned, for byte,
;; halfword, word and doubleword inputs).  Because element numbering is
;; reversed on little-endian, the BE path uses the vmule* (even)
;; instruction while the LE path uses vmulo* (odd), and vice versa for
;; the _odd expanders further below.
1548 (define_expand "vec_widen_umult_even_v16qi"
1549 [(use (match_operand:V8HI 0 "register_operand"))
1550 (use (match_operand:V16QI 1 "register_operand"))
1551 (use (match_operand:V16QI 2 "register_operand"))]
1554 if (BYTES_BIG_ENDIAN)
1555 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1557 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));

1561 (define_expand "vec_widen_smult_even_v16qi"
1562 [(use (match_operand:V8HI 0 "register_operand"))
1563 (use (match_operand:V16QI 1 "register_operand"))
1564 (use (match_operand:V16QI 2 "register_operand"))]
1567 if (BYTES_BIG_ENDIAN)
1568 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1570 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));

1574 (define_expand "vec_widen_umult_even_v8hi"
1575 [(use (match_operand:V4SI 0 "register_operand"))
1576 (use (match_operand:V8HI 1 "register_operand"))
1577 (use (match_operand:V8HI 2 "register_operand"))]
1580 if (BYTES_BIG_ENDIAN)
1581 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1583 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));

1587 (define_expand "vec_widen_smult_even_v8hi"
1588 [(use (match_operand:V4SI 0 "register_operand"))
1589 (use (match_operand:V8HI 1 "register_operand"))
1590 (use (match_operand:V8HI 2 "register_operand"))]
1593 if (BYTES_BIG_ENDIAN)
1594 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1596 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));

1600 (define_expand "vec_widen_umult_even_v4si"
1601 [(use (match_operand:V2DI 0 "register_operand"))
1602 (use (match_operand:V4SI 1 "register_operand"))
1603 (use (match_operand:V4SI 2 "register_operand"))]
1606 if (BYTES_BIG_ENDIAN)
1607 emit_insn (gen_altivec_vmuleuw (operands[0], operands[1], operands[2]));
1609 emit_insn (gen_altivec_vmulouw (operands[0], operands[1], operands[2]));

1613 (define_expand "vec_widen_umult_even_v2di"
1614 [(use (match_operand:V1TI 0 "register_operand"))
1615 (use (match_operand:V2DI 1 "register_operand"))
1616 (use (match_operand:V2DI 2 "register_operand"))]
1619 if (BYTES_BIG_ENDIAN)
1620 emit_insn (gen_altivec_vmuleud (operands[0], operands[1], operands[2]));
1622 emit_insn (gen_altivec_vmuloud (operands[0], operands[1], operands[2]));

1626 (define_expand "vec_widen_smult_even_v4si"
1627 [(use (match_operand:V2DI 0 "register_operand"))
1628 (use (match_operand:V4SI 1 "register_operand"))
1629 (use (match_operand:V4SI 2 "register_operand"))]
1632 if (BYTES_BIG_ENDIAN)
1633 emit_insn (gen_altivec_vmulesw (operands[0], operands[1], operands[2]));
1635 emit_insn (gen_altivec_vmulosw (operands[0], operands[1], operands[2]));

1639 (define_expand "vec_widen_smult_even_v2di"
1640 [(use (match_operand:V1TI 0 "register_operand"))
1641 (use (match_operand:V2DI 1 "register_operand"))
1642 (use (match_operand:V2DI 2 "register_operand"))]
1645 if (BYTES_BIG_ENDIAN)
1646 emit_insn (gen_altivec_vmulesd (operands[0], operands[1], operands[2]));
1648 emit_insn (gen_altivec_vmulosd (operands[0], operands[1], operands[2]));

;; Widening multiply of the ODD elements: the endian selection is the
;; mirror image of the _even expanders above (BE uses vmulo*, LE vmule*).
1652 (define_expand "vec_widen_umult_odd_v16qi"
1653 [(use (match_operand:V8HI 0 "register_operand"))
1654 (use (match_operand:V16QI 1 "register_operand"))
1655 (use (match_operand:V16QI 2 "register_operand"))]
1658 if (BYTES_BIG_ENDIAN)
1659 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1661 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));

1665 (define_expand "vec_widen_smult_odd_v16qi"
1666 [(use (match_operand:V8HI 0 "register_operand"))
1667 (use (match_operand:V16QI 1 "register_operand"))
1668 (use (match_operand:V16QI 2 "register_operand"))]
1671 if (BYTES_BIG_ENDIAN)
1672 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1674 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));

1678 (define_expand "vec_widen_umult_odd_v8hi"
1679 [(use (match_operand:V4SI 0 "register_operand"))
1680 (use (match_operand:V8HI 1 "register_operand"))
1681 (use (match_operand:V8HI 2 "register_operand"))]
1684 if (BYTES_BIG_ENDIAN)
1685 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1687 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));

1691 (define_expand "vec_widen_smult_odd_v8hi"
1692 [(use (match_operand:V4SI 0 "register_operand"))
1693 (use (match_operand:V8HI 1 "register_operand"))
1694 (use (match_operand:V8HI 2 "register_operand"))]
1697 if (BYTES_BIG_ENDIAN)
1698 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1700 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));

1704 (define_expand "vec_widen_umult_odd_v4si"
1705 [(use (match_operand:V2DI 0 "register_operand"))
1706 (use (match_operand:V4SI 1 "register_operand"))
1707 (use (match_operand:V4SI 2 "register_operand"))]
1710 if (BYTES_BIG_ENDIAN)
1711 emit_insn (gen_altivec_vmulouw (operands[0], operands[1], operands[2]));
1713 emit_insn (gen_altivec_vmuleuw (operands[0], operands[1], operands[2]));

1717 (define_expand "vec_widen_umult_odd_v2di"
1718 [(use (match_operand:V1TI 0 "register_operand"))
1719 (use (match_operand:V2DI 1 "register_operand"))
1720 (use (match_operand:V2DI 2 "register_operand"))]
1723 if (BYTES_BIG_ENDIAN)
1724 emit_insn (gen_altivec_vmuloud (operands[0], operands[1], operands[2]));
1726 emit_insn (gen_altivec_vmuleud (operands[0], operands[1], operands[2]));

1730 (define_expand "vec_widen_smult_odd_v4si"
1731 [(use (match_operand:V2DI 0 "register_operand"))
1732 (use (match_operand:V4SI 1 "register_operand"))
1733 (use (match_operand:V4SI 2 "register_operand"))]
1736 if (BYTES_BIG_ENDIAN)
1737 emit_insn (gen_altivec_vmulosw (operands[0], operands[1], operands[2]));
1739 emit_insn (gen_altivec_vmulesw (operands[0], operands[1], operands[2]));

1743 (define_expand "vec_widen_smult_odd_v2di"
1744 [(use (match_operand:V1TI 0 "register_operand"))
1745 (use (match_operand:V2DI 1 "register_operand"))
1746 (use (match_operand:V2DI 2 "register_operand"))]
1749 if (BYTES_BIG_ENDIAN)
1750 emit_insn (gen_altivec_vmulosd (operands[0], operands[1], operands[2]));
1752 emit_insn (gen_altivec_vmulesd (operands[0], operands[1], operands[2]));
;; Underlying even/odd widening-multiply instructions.  Each takes two
;; vectors of N-bit elements and produces a vector of 2N-bit products
;; (byte->halfword, halfword->word, word->doubleword, doubleword->quad).
;; The even/odd selection and signedness are encoded in the unspec; the
;; result mode is the doubled element width of the input mode.
1756 (define_insn "altivec_vmuleub"
1757 [(set (match_operand:V8HI 0 "register_operand" "=v")
1758 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1759 (match_operand:V16QI 2 "register_operand" "v")]
1763 [(set_attr "type" "veccomplex")])

1765 (define_insn "altivec_vmuloub"
1766 [(set (match_operand:V8HI 0 "register_operand" "=v")
1767 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1768 (match_operand:V16QI 2 "register_operand" "v")]
1772 [(set_attr "type" "veccomplex")])

1774 (define_insn "altivec_vmulesb"
1775 [(set (match_operand:V8HI 0 "register_operand" "=v")
1776 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1777 (match_operand:V16QI 2 "register_operand" "v")]
1781 [(set_attr "type" "veccomplex")])

1783 (define_insn "altivec_vmulosb"
1784 [(set (match_operand:V8HI 0 "register_operand" "=v")
1785 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1786 (match_operand:V16QI 2 "register_operand" "v")]
1790 [(set_attr "type" "veccomplex")])

1792 (define_insn "altivec_vmuleuh"
1793 [(set (match_operand:V4SI 0 "register_operand" "=v")
1794 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1795 (match_operand:V8HI 2 "register_operand" "v")]
1799 [(set_attr "type" "veccomplex")])

1801 (define_insn "altivec_vmulouh"
1802 [(set (match_operand:V4SI 0 "register_operand" "=v")
1803 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1804 (match_operand:V8HI 2 "register_operand" "v")]
1808 [(set_attr "type" "veccomplex")])

1810 (define_insn "altivec_vmulesh"
1811 [(set (match_operand:V4SI 0 "register_operand" "=v")
1812 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1813 (match_operand:V8HI 2 "register_operand" "v")]
1817 [(set_attr "type" "veccomplex")])

1819 (define_insn "altivec_vmulosh"
1820 [(set (match_operand:V4SI 0 "register_operand" "=v")
1821 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1822 (match_operand:V8HI 2 "register_operand" "v")]
1826 [(set_attr "type" "veccomplex")])

1828 (define_insn "altivec_vmuleuw"
1829 [(set (match_operand:V2DI 0 "register_operand" "=v")
1830 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1831 (match_operand:V4SI 2 "register_operand" "v")]
1835 [(set_attr "type" "veccomplex")])

1837 (define_insn "altivec_vmuleud"
1838 [(set (match_operand:V1TI 0 "register_operand" "=v")
1839 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1840 (match_operand:V2DI 2 "register_operand" "v")]
1844 [(set_attr "type" "veccomplex")])

1846 (define_insn "altivec_vmulouw"
1847 [(set (match_operand:V2DI 0 "register_operand" "=v")
1848 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1849 (match_operand:V4SI 2 "register_operand" "v")]
1853 [(set_attr "type" "veccomplex")])

1855 (define_insn "altivec_vmuloud"
1856 [(set (match_operand:V1TI 0 "register_operand" "=v")
1857 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1858 (match_operand:V2DI 2 "register_operand" "v")]
1862 [(set_attr "type" "veccomplex")])

1864 (define_insn "altivec_vmulesw"
1865 [(set (match_operand:V2DI 0 "register_operand" "=v")
1866 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1867 (match_operand:V4SI 2 "register_operand" "v")]
1871 [(set_attr "type" "veccomplex")])

1873 (define_insn "altivec_vmulesd"
1874 [(set (match_operand:V1TI 0 "register_operand" "=v")
1875 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1876 (match_operand:V2DI 2 "register_operand" "v")]
1880 [(set_attr "type" "veccomplex")])

1882 (define_insn "altivec_vmulosw"
1883 [(set (match_operand:V2DI 0 "register_operand" "=v")
1884 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1885 (match_operand:V4SI 2 "register_operand" "v")]
1889 [(set_attr "type" "veccomplex")])

1891 (define_insn "altivec_vmulosd"
1892 [(set (match_operand:V1TI 0 "register_operand" "=v")
1893 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1894 (match_operand:V2DI 2 "register_operand" "v")]
1898 [(set_attr "type" "veccomplex")])
1900 ;; Vector pack/unpack
;; Pixel pack; on little-endian the two source operands are swapped in
;; the asm output to compensate for reversed element numbering.
1901 (define_insn "altivec_vpkpx"
1902 [(set (match_operand:V8HI 0 "register_operand" "=v")
1903 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
1904 (match_operand:V4SI 2 "register_operand" "v")]
1908 if (BYTES_BIG_ENDIAN)
1909 return "vpkpx %0,%1,%2";
1911 return "vpkpx %0,%2,%1";
1913 [(set_attr "type" "vecperm")])

;; Pack signed -> signed with saturation (VP iterator: halfword/word/
;; doubleword sources, <VP_small> result).  LE swaps the operands.
1915 (define_insn "altivec_vpks<VI_char>ss"
1916 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1917 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1918 (match_operand:VP 2 "register_operand" "v")]
1919 UNSPEC_VPACK_SIGN_SIGN_SAT))]
1922 if (BYTES_BIG_ENDIAN)
1923 return "vpks<VI_char>ss %0,%1,%2";
1925 return "vpks<VI_char>ss %0,%2,%1";
1927 [(set_attr "type" "vecperm")])

;; Pack signed -> unsigned with saturation.
1929 (define_insn "altivec_vpks<VI_char>us"
1930 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1931 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1932 (match_operand:VP 2 "register_operand" "v")]
1933 UNSPEC_VPACK_SIGN_UNS_SAT))]
1936 if (BYTES_BIG_ENDIAN)
1937 return "vpks<VI_char>us %0,%1,%2";
1939 return "vpks<VI_char>us %0,%2,%1";
1941 [(set_attr "type" "vecperm")])

;; Pack unsigned -> unsigned with saturation.
1943 (define_insn "altivec_vpku<VI_char>us"
1944 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1945 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1946 (match_operand:VP 2 "register_operand" "v")]
1947 UNSPEC_VPACK_UNS_UNS_SAT))]
1950 if (BYTES_BIG_ENDIAN)
1951 return "vpku<VI_char>us %0,%1,%2";
1953 return "vpku<VI_char>us %0,%2,%1";
1955 [(set_attr "type" "vecperm")])

;; Pack unsigned -> unsigned modulo (truncating, no saturation).
1957 (define_insn "altivec_vpku<VI_char>um"
1958 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1959 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1960 (match_operand:VP 2 "register_operand" "v")]
1961 UNSPEC_VPACK_UNS_UNS_MOD))]
1964 if (BYTES_BIG_ENDIAN)
1965 return "vpku<VI_char>um %0,%1,%2";
1967 return "vpku<VI_char>um %0,%2,%1";
1969 [(set_attr "type" "vecperm")])

;; "Direct" variant under a distinct unspec; same asm selection.
1971 (define_insn "altivec_vpku<VI_char>um_direct"
1972 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1973 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1974 (match_operand:VP 2 "register_operand" "v")]
1975 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT))]
1978 if (BYTES_BIG_ENDIAN)
1979 return "vpku<VI_char>um %0,%1,%2";
1981 return "vpku<VI_char>um %0,%2,%1";
1983 [(set_attr "type" "vecperm")])
;; Element-wise rotate left for the VI2 integer vector modes.
1985 (define_insn "altivec_vrl<VI_char>"
1986 [(set (match_operand:VI2 0 "register_operand" "=v")
1987 (rotate:VI2 (match_operand:VI2 1 "register_operand" "v")
1988 (match_operand:VI2 2 "register_operand" "v")))]
1990 "vrl<VI_char> %0,%1,%2"
1991 [(set_attr "type" "vecsimple")])

;; 128-bit rotate left (V1TI).
1993 (define_insn "altivec_vrlq"
1994 [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
1995 (rotate:V1TI (match_operand:V1TI 1 "vsx_register_operand" "v")
1996 (match_operand:V1TI 2 "vsx_register_operand" "v")))]
1998 ;; rotate amount in needs to be in bits[57:63] of operand2.
2000 [(set_attr "type" "vecsimple")])

;; Rotate-left-then-mask-insert: op2 ("0" constraint) is both an input
;; (the value to insert into) and the tied output register.
2002 (define_insn "altivec_vrl<VI_char>mi"
2003 [(set (match_operand:VIlong 0 "register_operand" "=v")
2004 (unspec:VIlong [(match_operand:VIlong 1 "register_operand" "v")
2005 (match_operand:VIlong 2 "register_operand" "0")
2006 (match_operand:VIlong 3 "register_operand" "v")]
2009 "vrl<VI_char>mi %0,%1,%3"
2010 [(set_attr "type" "veclogical")])

;; 128-bit rotate-then-mask-insert; operand 3 is xxswapd'd into a temp
;; first so the control fields land in the doubleword the hw reads.
2012 (define_expand "altivec_vrlqmi"
2013 [(set (match_operand:V1TI 0 "vsx_register_operand")
2014 (unspec:V1TI [(match_operand:V1TI 1 "vsx_register_operand")
2015 (match_operand:V1TI 2 "vsx_register_operand")
2016 (match_operand:V1TI 3 "vsx_register_operand")]
2020 /* Mask bit begin, end fields need to be in bits [41:55] of 128-bit operand2.
2021 Shift amount in needs to be put in bits[57:63] of 128-bit operand2. */
2022 rtx tmp = gen_reg_rtx (V1TImode);
2024 emit_insn (gen_xxswapd_v1ti (tmp, operands[3]));
2025 emit_insn (gen_altivec_vrlqmi_inst (operands[0], operands[1], operands[2],

;; Matching insn for the expand above; op2 is tied to the output.
2030 (define_insn "altivec_vrlqmi_inst"
2031 [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
2032 (unspec:V1TI [(match_operand:V1TI 1 "vsx_register_operand" "v")
2033 (match_operand:V1TI 2 "vsx_register_operand" "0")
2034 (match_operand:V1TI 3 "vsx_register_operand" "v")]
2038 [(set_attr "type" "veclogical")])
;; Rotate-left-then-AND-with-mask for VIlong modes.
2040 (define_insn "altivec_vrl<VI_char>nm"
2041 [(set (match_operand:VIlong 0 "register_operand" "=v")
2042 (unspec:VIlong [(match_operand:VIlong 1 "register_operand" "v")
2043 (match_operand:VIlong 2 "register_operand" "v")]
2046 "vrl<VI_char>nm %0,%1,%2"
2047 [(set_attr "type" "veclogical")])

;; 128-bit rotate-then-mask; operand 2 is xxswapd'd into a temp first so
;; the shift amount reaches the doubleword the hardware reads.
2049 (define_expand "altivec_vrlqnm"
2050 [(set (match_operand:V1TI 0 "vsx_register_operand")
2051 (unspec:V1TI [(match_operand:V1TI 1 "vsx_register_operand")
2052 (match_operand:V1TI 2 "vsx_register_operand")]
2056 /* Shift amount in needs to be put in bits[57:63] of 128-bit operand2. */
2057 rtx tmp = gen_reg_rtx (V1TImode);
2059 emit_insn (gen_xxswapd_v1ti (tmp, operands[2]));
2060 emit_insn (gen_altivec_vrlqnm_inst (operands[0], operands[1], tmp));

;; Matching insn for the expand above.
2064 (define_insn "altivec_vrlqnm_inst"
2065 [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
2066 (unspec:V1TI [(match_operand:V1TI 1 "vsx_register_operand" "v")
2067 (match_operand:V1TI 2 "vsx_register_operand" "v")]
2070 ;; rotate and mask bits need to be in upper 64-bits of operand2.
2072 [(set_attr "type" "veclogical")])
;; Whole-vector shift left by bits (vsl) — unspec form on V4SI.
2074 (define_insn "altivec_vsl"
2075 [(set (match_operand:V4SI 0 "register_operand" "=v")
2076 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2077 (match_operand:V4SI 2 "register_operand" "v")]
2081 [(set_attr "type" "vecperm")])

;; Whole-vector shift left by octets (vslo).
2083 (define_insn "altivec_vslo"
2084 [(set (match_operand:V4SI 0 "register_operand" "=v")
2085 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2086 (match_operand:V4SI 2 "register_operand" "v")]
2090 [(set_attr "type" "vecperm")])

;; Two further V16QI unspec insns (their define_insn header lines are
;; not visible in this excerpt).
2093 [(set (match_operand:V16QI 0 "register_operand" "=v")
2094 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
2095 (match_operand:V16QI 2 "register_operand" "v")]
2099 [(set_attr "type" "vecsimple")])

2102 [(set (match_operand:V16QI 0 "register_operand" "=v")
2103 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
2104 (match_operand:V16QI 2 "register_operand" "v")]
2108 [(set_attr "type" "vecsimple")])

;; Element-wise shift left for the VI2 integer vector modes.
2110 (define_insn "*altivec_vsl<VI_char>"
2111 [(set (match_operand:VI2 0 "register_operand" "=v")
2112 (ashift:VI2 (match_operand:VI2 1 "register_operand" "v")
2113 (match_operand:VI2 2 "register_operand" "v")))]
2115 "vsl<VI_char> %0,%1,%2"
2116 [(set_attr "type" "vecsimple")])

;; 128-bit shift left (VEC_TI modes).
2118 (define_insn "altivec_vslq_<mode>"
2119 [(set (match_operand:VEC_TI 0 "vsx_register_operand" "=v")
2120 (ashift:VEC_TI (match_operand:VEC_TI 1 "vsx_register_operand" "v")
2121 (match_operand:VEC_TI 2 "vsx_register_operand" "v")))]
2123 /* Shift amount in needs to be in bits[57:63] of 128-bit operand. */
2125 [(set_attr "type" "vecsimple")])
;; Element-wise logical shift right for the VI2 integer vector modes.
2127 (define_insn "*altivec_vsr<VI_char>"
2128 [(set (match_operand:VI2 0 "register_operand" "=v")
2129 (lshiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
2130 (match_operand:VI2 2 "register_operand" "v")))]
2132 "vsr<VI_char> %0,%1,%2"
2133 [(set_attr "type" "vecsimple")])

;; 128-bit logical shift right (VEC_TI modes).
2135 (define_insn "altivec_vsrq_<mode>"
2136 [(set (match_operand:VEC_TI 0 "vsx_register_operand" "=v")
2137 (lshiftrt:VEC_TI (match_operand:VEC_TI 1 "vsx_register_operand" "v")
2138 (match_operand:VEC_TI 2 "vsx_register_operand" "v")))]
2140 /* Shift amount in needs to be in bits[57:63] of 128-bit operand. */
2142 [(set_attr "type" "vecsimple")])

;; Element-wise arithmetic shift right.
2144 (define_insn "*altivec_vsra<VI_char>"
2145 [(set (match_operand:VI2 0 "register_operand" "=v")
2146 (ashiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
2147 (match_operand:VI2 2 "register_operand" "v")))]
2149 "vsra<VI_char> %0,%1,%2"
2150 [(set_attr "type" "vecsimple")])

;; 128-bit arithmetic shift right.
2152 (define_insn "altivec_vsraq"
2153 [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
2154 (ashiftrt:V1TI (match_operand:V1TI 1 "vsx_register_operand" "v")
2155 (match_operand:V1TI 2 "vsx_register_operand" "v")))]
2157 /* Shift amount in needs to be in bits[57:63] of 128-bit operand. */
2159 [(set_attr "type" "vecsimple")])
;; Whole-vector shift right by bits (vsr) — unspec form on V4SI.
2161 (define_insn "altivec_vsr"
2162 [(set (match_operand:V4SI 0 "register_operand" "=v")
2163 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2164 (match_operand:V4SI 2 "register_operand" "v")]
2168 [(set_attr "type" "vecperm")])

;; Whole-vector shift right by octets (vsro).
2170 (define_insn "altivec_vsro"
2171 [(set (match_operand:V4SI 0 "register_operand" "=v")
2172 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2173 (match_operand:V4SI 2 "register_operand" "v")]
2177 [(set_attr "type" "vecperm")])
;; Saturating sums across the quarter-vectors; both clobber/set VSCR
;; (the saturation bit), modeled by the second parallel set.
2179 (define_insn "altivec_vsum4ubs"
2180 [(set (match_operand:V4SI 0 "register_operand" "=v")
2181 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")
2182 (match_operand:V4SI 2 "register_operand" "v")]
2184 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2187 [(set_attr "type" "veccomplex")])

;; Signed variant over the VIshort (V8HI/V16QI) iterator.
2189 (define_insn "altivec_vsum4s<VI_char>s"
2190 [(set (match_operand:V4SI 0 "register_operand" "=v")
2191 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
2192 (match_operand:V4SI 2 "register_operand" "v")]
2194 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2196 "vsum4s<VI_char>s %0,%1,%2"
2197 [(set_attr "type" "veccomplex")])
;; Sum across half-vectors (vsum2sws).  On little-endian the operand
;; elements are pre-rotated with vsldoi and the result rotated back so
;; the direct instruction produces the BE-defined element placement.
2199 (define_expand "altivec_vsum2sws"
2200 [(use (match_operand:V4SI 0 "register_operand"))
2201 (use (match_operand:V4SI 1 "register_operand"))
2202 (use (match_operand:V4SI 2 "register_operand"))]
2205 if (BYTES_BIG_ENDIAN)
2206 emit_insn (gen_altivec_vsum2sws_direct (operands[0], operands[1],
2210 rtx tmp1 = gen_reg_rtx (V4SImode);
2211 rtx tmp2 = gen_reg_rtx (V4SImode);
2212 emit_insn (gen_altivec_vsldoi_v4si (tmp1, operands[2],
2213 operands[2], GEN_INT (12)));
2214 emit_insn (gen_altivec_vsum2sws_direct (tmp2, operands[1], tmp1));
2215 emit_insn (gen_altivec_vsldoi_v4si (operands[0], tmp2, tmp2,

2221 ; FIXME: This can probably be expressed without an UNSPEC.
2222 (define_insn "altivec_vsum2sws_direct"
2223 [(set (match_operand:V4SI 0 "register_operand" "=v")
2224 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2225 (match_operand:V4SI 2 "register_operand" "v")]
2227 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2230 [(set_attr "type" "veccomplex")])

;; Sum across the whole vector (vsumsws).  The LE path splats element 0
;; of operand 2 first, then rotates the result into place with vsldoi.
2232 (define_expand "altivec_vsumsws"
2233 [(use (match_operand:V4SI 0 "register_operand"))
2234 (use (match_operand:V4SI 1 "register_operand"))
2235 (use (match_operand:V4SI 2 "register_operand"))]
2238 if (BYTES_BIG_ENDIAN)
2239 emit_insn (gen_altivec_vsumsws_direct (operands[0], operands[1],
2243 rtx tmp1 = gen_reg_rtx (V4SImode);
2244 rtx tmp2 = gen_reg_rtx (V4SImode);
2245 emit_insn (gen_altivec_vspltw_direct (tmp1, operands[2], const0_rtx));
2246 emit_insn (gen_altivec_vsumsws_direct (tmp2, operands[1], tmp1));
2247 emit_insn (gen_altivec_vsldoi_v4si (operands[0], tmp2, tmp2,

2253 ; FIXME: This can probably be expressed without an UNSPEC.
2254 (define_insn "altivec_vsumsws_direct"
2255 [(set (match_operand:V4SI 0 "register_operand" "=v")
2256 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2257 (match_operand:V4SI 2 "register_operand" "v")]
2258 UNSPEC_VSUMSWS_DIRECT))
2259 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2262 [(set_attr "type" "veccomplex")])
;; Splat byte: broadcast the byte at constant index op2 of op1 to all
;; 16 lanes, expanded as vec_duplicate of a vec_select.
2264 (define_expand "altivec_vspltb"
2265 [(use (match_operand:V16QI 0 "register_operand"))
2266 (use (match_operand:V16QI 1 "register_operand"))
2267 (use (match_operand:QI 2 "const_0_to_15_operand"))]
2270 rtvec v = gen_rtvec (1, operands[2]);
2272 x = gen_rtx_VEC_SELECT (QImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2273 x = gen_rtx_VEC_DUPLICATE (V16QImode, x);
2274 emit_insn (gen_rtx_SET (operands[0], x));

;; Matching insn; on little-endian the lane index is mirrored
;; (15 - n) before being emitted in the vspltb instruction.
2278 (define_insn "*altivec_vspltb_internal"
2279 [(set (match_operand:V16QI 0 "register_operand" "=v")
2280 (vec_duplicate:V16QI
2281 (vec_select:QI (match_operand:V16QI 1 "register_operand" "v")
2283 [(match_operand:QI 2 "const_0_to_15_operand" "")]))))]
2286 if (!BYTES_BIG_ENDIAN)
2287 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
2289 return "vspltb %0,%1,%2";
2291 [(set_attr "type" "vecperm")])

;; Direct unspec form: emits vspltb with the index exactly as given,
;; with no endian adjustment.
2293 (define_insn "altivec_vspltb_direct"
2294 [(set (match_operand:V16QI 0 "register_operand" "=v")
2295 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
2296 (match_operand:QI 2 "const_0_to_15_operand" "i")]
2297 UNSPEC_VSPLT_DIRECT))]
2300 [(set_attr "type" "vecperm")])
2302 (define_expand "altivec_vsplth"
2303 [(use (match_operand:V8HI 0 "register_operand"))
2304 (use (match_operand:V8HI 1 "register_operand"))
2305 (use (match_operand:QI 2 "const_0_to_7_operand"))]
2308 rtvec v = gen_rtvec (1, operands[2]);
2310 x = gen_rtx_VEC_SELECT (HImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2311 x = gen_rtx_VEC_DUPLICATE (V8HImode, x);
2312 emit_insn (gen_rtx_SET (operands[0], x));
2316 (define_insn "*altivec_vsplth_internal"
2317 [(set (match_operand:V8HI 0 "register_operand" "=v")
2319 (vec_select:HI (match_operand:V8HI 1 "register_operand" "v")
2321 [(match_operand:QI 2 "const_0_to_7_operand" "")]))))]
2324 if (!BYTES_BIG_ENDIAN)
2325 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
2327 return "vsplth %0,%1,%2";
2329 [(set_attr "type" "vecperm")])
2331 (define_insn "altivec_vsplth_direct"
2332 [(set (match_operand:V8HI 0 "register_operand" "=v")
2333 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
2334 (match_operand:QI 2 "const_0_to_7_operand" "i")]
2335 UNSPEC_VSPLT_DIRECT))]
2338 [(set_attr "type" "vecperm")])
2340 (define_expand "altivec_vspltw"
2341 [(use (match_operand:V4SI 0 "register_operand"))
2342 (use (match_operand:V4SI 1 "register_operand"))
2343 (use (match_operand:QI 2 "const_0_to_3_operand"))]
2346 rtvec v = gen_rtvec (1, operands[2]);
2348 x = gen_rtx_VEC_SELECT (SImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2349 x = gen_rtx_VEC_DUPLICATE (V4SImode, x);
2350 emit_insn (gen_rtx_SET (operands[0], x));
2354 (define_insn "*altivec_vspltw_internal"
2355 [(set (match_operand:V4SI 0 "register_operand" "=v")
2357 (vec_select:SI (match_operand:V4SI 1 "register_operand" "v")
2359 [(match_operand:QI 2 "const_0_to_3_operand" "i")]))))]
2362 if (!BYTES_BIG_ENDIAN)
2363 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2365 return "vspltw %0,%1,%2";
2367 [(set_attr "type" "vecperm")])
2369 (define_insn "altivec_vspltw_direct"
2370 [(set (match_operand:V4SI 0 "register_operand" "=v")
2371 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2372 (match_operand:QI 2 "const_0_to_3_operand" "i")]
2373 UNSPEC_VSPLT_DIRECT))]
2376 [(set_attr "type" "vecperm")])
2378 (define_expand "altivec_vspltsf"
2379 [(use (match_operand:V4SF 0 "register_operand"))
2380 (use (match_operand:V4SF 1 "register_operand"))
2381 (use (match_operand:QI 2 "const_0_to_3_operand"))]
2384 rtvec v = gen_rtvec (1, operands[2]);
2386 x = gen_rtx_VEC_SELECT (SFmode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2387 x = gen_rtx_VEC_DUPLICATE (V4SFmode, x);
2388 emit_insn (gen_rtx_SET (operands[0], x));
2392 (define_insn "*altivec_vspltsf_internal"
2393 [(set (match_operand:V4SF 0 "register_operand" "=v")
2395 (vec_select:SF (match_operand:V4SF 1 "register_operand" "v")
2397 [(match_operand:QI 2 "const_0_to_3_operand" "i")]))))]
2398 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2400 if (!BYTES_BIG_ENDIAN)
2401 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2403 return "vspltw %0,%1,%2";
2405 [(set_attr "type" "vecperm")])
2407 (define_insn "altivec_vspltis<VI_char>"
2408 [(set (match_operand:VI 0 "register_operand" "=v")
2410 (match_operand:QI 1 "s5bit_cint_operand" "i")))]
2412 "vspltis<VI_char> %0,%1"
2413 [(set_attr "type" "vecperm")])
2415 (define_insn "*altivec_vrfiz"
2416 [(set (match_operand:V4SF 0 "register_operand" "=v")
2417 (fix:V4SF (match_operand:V4SF 1 "register_operand" "v")))]
2418 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2420 [(set_attr "type" "vecfloat")])
;; Permute family (vperm / vpermr).  Expanders call
;; altivec_expand_vec_perm_le on little endian to adjust the selector;
;; the *_direct / *_internal insns emit the raw instruction.  The "0,v"
;; constraint on operand 2 ties it to the output in the VSX alternative;
;; the comment below explains the vperm preference.
;; NOTE(review): conditions and output templates for several insns are
;; elided in this excerpt (line-number gaps) — verify against upstream.
2422 (define_expand "altivec_vperm_<mode>"
2423 [(set (match_operand:VM 0 "register_operand")
2424 (unspec:VM [(match_operand:VM 1 "register_operand")
2425 (match_operand:VM 2 "register_operand")
2426 (match_operand:V16QI 3 "register_operand")]
2430 if (!BYTES_BIG_ENDIAN)
2432 altivec_expand_vec_perm_le (operands);
2437 ;; Slightly prefer vperm, since the target does not overlap the source
2438 (define_insn "altivec_vperm_<mode>_direct"
2439 [(set (match_operand:VM 0 "register_operand" "=?wa,v")
2440 (unspec:VM [(match_operand:VM 1 "register_operand" "wa,v")
2441 (match_operand:VM 2 "register_operand" "0,v")
2442 (match_operand:V16QI 3 "register_operand" "wa,v")]
2448 [(set_attr "type" "vecperm")
2449 (set_attr "isa" "p9v,*")])
;; Mixed-mode permute: V8HI inputs, V16QI selector and result.
2451 (define_insn "altivec_vperm_v8hiv16qi"
2452 [(set (match_operand:V16QI 0 "register_operand" "=?wa,v")
2453 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "wa,v")
2454 (match_operand:V8HI 2 "register_operand" "0,v")
2455 (match_operand:V16QI 3 "register_operand" "wa,v")]
2461 [(set_attr "type" "vecperm")
2462 (set_attr "isa" "p9v,*")])
;; Unsigned variant — same structure, distinct unspec so combine does not
;; mix signedness.
2464 (define_expand "altivec_vperm_<mode>_uns"
2465 [(set (match_operand:VM 0 "register_operand")
2466 (unspec:VM [(match_operand:VM 1 "register_operand")
2467 (match_operand:VM 2 "register_operand")
2468 (match_operand:V16QI 3 "register_operand")]
2472 if (!BYTES_BIG_ENDIAN)
2474 altivec_expand_vec_perm_le (operands);
2479 (define_insn "*altivec_vperm_<mode>_uns_internal"
2480 [(set (match_operand:VM 0 "register_operand" "=?wa,v")
2481 (unspec:VM [(match_operand:VM 1 "register_operand" "wa,v")
2482 (match_operand:VM 2 "register_operand" "0,v")
2483 (match_operand:V16QI 3 "register_operand" "wa,v")]
2489 [(set_attr "type" "vecperm")
2490 (set_attr "isa" "p9v,*")])
;; Standard-name expander for the generic vec_perm optab on V16QI.
2492 (define_expand "vec_permv16qi"
2493 [(set (match_operand:V16QI 0 "register_operand")
2494 (unspec:V16QI [(match_operand:V16QI 1 "register_operand")
2495 (match_operand:V16QI 2 "register_operand")
2496 (match_operand:V16QI 3 "register_operand")]
2500 if (!BYTES_BIG_ENDIAN) {
2501 altivec_expand_vec_perm_le (operands);
2506 (define_insn "*altivec_vpermr_<mode>_internal"
2507 [(set (match_operand:VM 0 "register_operand" "=?wa,v")
2508 (unspec:VM [(match_operand:VM 1 "register_operand" "wa,v")
2509 (match_operand:VM 2 "register_operand" "0,v")
2510 (match_operand:V16QI 3 "register_operand" "wa,v")]
2516 [(set_attr "type" "vecperm")
2517 (set_attr "isa" "p9v,*")])
;; Rounding, int<->float conversion, and estimate instructions, all plain
;; unspec insns over V4SF/V4SI.  vcfux/vcfsx and vctuxs/vctsxs take a
;; 5-bit scale immediate (operand 2); the vct*xs insns saturate and so
;; also set VSCR.  NOTE(review): the unspec names, conditions and output
;; templates of several forms are elided in this excerpt (line-number gaps).
2519 (define_insn "altivec_vrfip" ; ceil
2520 [(set (match_operand:V4SF 0 "register_operand" "=v")
2521 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2525 [(set_attr "type" "vecfloat")])
;; Round to nearest.
2527 (define_insn "altivec_vrfin"
2528 [(set (match_operand:V4SF 0 "register_operand" "=v")
2529 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2533 [(set_attr "type" "vecfloat")])
2535 (define_insn "*altivec_vrfim" ; floor
2536 [(set (match_operand:V4SF 0 "register_operand" "=v")
2537 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2541 [(set_attr "type" "vecfloat")])
;; Unsigned fixed-point word -> float, scaled by 2**-imm.
2543 (define_insn "altivec_vcfux"
2544 [(set (match_operand:V4SF 0 "register_operand" "=v")
2545 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2546 (match_operand:QI 2 "immediate_operand" "i")]
2550 [(set_attr "type" "vecfloat")])
;; Signed fixed-point word -> float, scaled by 2**-imm.
2552 (define_insn "altivec_vcfsx"
2553 [(set (match_operand:V4SF 0 "register_operand" "=v")
2554 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2555 (match_operand:QI 2 "immediate_operand" "i")]
2559 [(set_attr "type" "vecfloat")])
;; Float -> unsigned fixed-point word with saturation (sets VSCR SAT).
2561 (define_insn "altivec_vctuxs"
2562 [(set (match_operand:V4SI 0 "register_operand" "=v")
2563 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2564 (match_operand:QI 2 "immediate_operand" "i")]
2566 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2569 [(set_attr "type" "vecfloat")])
;; Float -> signed fixed-point word with saturation (sets VSCR SAT).
2571 (define_insn "altivec_vctsxs"
2572 [(set (match_operand:V4SI 0 "register_operand" "=v")
2573 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2574 (match_operand:QI 2 "immediate_operand" "i")]
2576 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2579 [(set_attr "type" "vecfloat")])
;; log2 / 2**x / reciprocal sqrt / reciprocal estimate instructions.
2581 (define_insn "altivec_vlogefp"
2582 [(set (match_operand:V4SF 0 "register_operand" "=v")
2583 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2587 [(set_attr "type" "vecfloat")])
2589 (define_insn "altivec_vexptefp"
2590 [(set (match_operand:V4SF 0 "register_operand" "=v")
2591 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2595 [(set_attr "type" "vecfloat")])
2597 (define_insn "*altivec_vrsqrtefp"
2598 [(set (match_operand:V4SF 0 "register_operand" "=v")
2599 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2601 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2603 [(set_attr "type" "vecfloat")])
2605 (define_insn "altivec_vrefp"
2606 [(set (match_operand:V4SF 0 "register_operand" "=v")
2607 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2609 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2611 [(set_attr "type" "vecfloat")])
;; copysign: build a splat of the sign-bit mask (1 << 31 in each word),
;; then vsel magnitude bits from operand 1 and sign bits from operand 2.
2613 (define_expand "altivec_copysign_v4sf3"
2614 [(use (match_operand:V4SF 0 "register_operand"))
2615 (use (match_operand:V4SF 1 "register_operand"))
2616 (use (match_operand:V4SF 2 "register_operand"))]
2617 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2619 rtx mask = gen_reg_rtx (V4SImode);
2620 rtx mask_val = gen_int_mode (HOST_WIDE_INT_1U << 31, SImode);
2621 rtvec v = gen_rtvec (4, mask_val, mask_val, mask_val, mask_val);
2623 emit_insn (gen_vec_initv4sisi (mask, gen_rtx_PARALLEL (V4SImode, v)));
2624 emit_insn (gen_vector_select_v4sf (operands[0], operands[1], operands[2],
2625 gen_lowpart (V4SFmode, mask)));
;; vsldoi: concatenate operands 1 and 2 and shift left by the byte count in
;; operand 3; used above as the endian fixup for the sum-across expanders.
2629 (define_insn "altivec_vsldoi_<mode>"
2630 [(set (match_operand:VM 0 "register_operand" "=v")
2631 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
2632 (match_operand:VM 2 "register_operand" "v")
2633 (match_operand:QI 3 "immediate_operand" "i")]
2636 "vsldoi %0,%1,%2,%3"
2637 [(set_attr "type" "vecperm")])
;; Unpack family.  "hi"/"lo" refer to big-endian element numbering, so on
;; little endian the endian-aware insns emit the opposite instruction
;; (vupkhs <-> vupkls, vupkhpx <-> vupklpx); the *_direct forms always emit
;; the named instruction.  VP/VP_small pair a vector mode with its
;; half-width-element source mode.
2639 (define_insn "altivec_vupkhs<VU_char>"
2640 [(set (match_operand:VP 0 "register_operand" "=v")
2641 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2642 UNSPEC_VUNPACK_HI_SIGN))]
2645 if (BYTES_BIG_ENDIAN)
2646 return "vupkhs<VU_char> %0,%1";
2648 return "vupkls<VU_char> %0,%1";
2650 [(set_attr "type" "vecperm")])
2652 (define_insn "altivec_vupkhs<VU_char>_direct"
2653 [(set (match_operand:VP 0 "register_operand" "=v")
2654 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2655 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2657 "vupkhs<VU_char> %0,%1"
2658 [(set_attr "type" "vecperm")])
2660 (define_insn "altivec_vupkls<VU_char>"
2661 [(set (match_operand:VP 0 "register_operand" "=v")
2662 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2663 UNSPEC_VUNPACK_LO_SIGN))]
2666 if (BYTES_BIG_ENDIAN)
2667 return "vupkls<VU_char> %0,%1";
2669 return "vupkhs<VU_char> %0,%1";
2671 [(set_attr "type" "vecperm")])
2673 (define_insn "*altivec_vupkls<VU_char>_direct"
2674 [(set (match_operand:VP 0 "register_operand" "=v")
2675 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2676 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
2678 "vupkls<VU_char> %0,%1"
2679 [(set_attr "type" "vecperm")])
;; Pixel unpack (V8HI 1/5/5/5 pixels -> V4SI 8/8/8/8), same hi/lo swap on LE.
2681 (define_insn "altivec_vupkhpx"
2682 [(set (match_operand:V4SI 0 "register_operand" "=v")
2683 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2687 if (BYTES_BIG_ENDIAN)
2688 return "vupkhpx %0,%1";
2690 return "vupklpx %0,%1";
2692 [(set_attr "type" "vecperm")])
2694 (define_insn "altivec_vupklpx"
2695 [(set (match_operand:V4SI 0 "register_operand" "=v")
2696 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2700 if (BYTES_BIG_ENDIAN)
2701 return "vupklpx %0,%1";
2703 return "vupkhpx %0,%1";
2705 [(set_attr "type" "vecperm")])
2707 /* The cbranch_optab doesn't allow FAIL, so old cpus which are
2708 inefficient on unaligned vsx are disabled as the cost is high
2709 for unaligned load/store. */
;; cbranchv16qi4: compare two V16QI operands for (in)equality and branch.
;; On pre-P9 LE, plain memory operands are loaded with a VSX permute-style
;; move (byte order is irrelevant for an equality test), avoiding the
;; doubleword swap; otherwise operands are simply forced into registers.
;; Finally the comparison is rebuilt as a V16QI rtx and handed to
;; rs6000_emit_cbranch.
2710 (define_expand "cbranchv16qi4"
2711 [(use (match_operator 0 "equality_operator"
2712 [(match_operand:V16QI 1 "reg_or_mem_operand")
2713 (match_operand:V16QI 2 "reg_or_mem_operand")]))
2714 (use (match_operand 3))]
2715 "VECTOR_MEM_VSX_P (V16QImode)
2716 && TARGET_EFFICIENT_UNALIGNED_VSX"
2718 /* Use direct move for P8 LE to skip doubleword swap, as the byte
2719 order doesn't matter for equality compare. If any operands are
2720 altivec indexed or indirect operands, the load can be implemented
2721 directly by altivec aligned load instruction and swap is no
2723 if (!TARGET_P9_VECTOR
2724 && !BYTES_BIG_ENDIAN
2725 && MEM_P (operands[1])
2726 && !altivec_indexed_or_indirect_operand (operands[1], V16QImode)
2727 && MEM_P (operands[2])
2728 && !altivec_indexed_or_indirect_operand (operands[2], V16QImode))
2730 rtx reg_op1 = gen_reg_rtx (V16QImode);
2731 rtx reg_op2 = gen_reg_rtx (V16QImode);
2732 rs6000_emit_le_vsx_permute (reg_op1, operands[1], V16QImode);
2733 rs6000_emit_le_vsx_permute (reg_op2, operands[2], V16QImode);
2734 operands[1] = reg_op1;
2735 operands[2] = reg_op2;
2739 operands[1] = force_reg (V16QImode, operands[1]);
2740 operands[2] = force_reg (V16QImode, operands[2]);
2743 rtx_code code = GET_CODE (operands[0]);
2744 operands[0] = gen_rtx_fmt_ee (code, V16QImode, operands[1], operands[2]);
2745 rs6000_emit_cbranch (V16QImode, operands);
2749 ;; Compare vectors producing a vector result and a predicate, setting CR6 to
2750 ;; indicate a combined status
;; Each "record form" compare (trailing '.') writes both the element-wise
;; mask in operand 0 and a summary predicate in CR6, modeled here as a
;; parallel of the CR6 unspec set and the plain eq/gt/gtu vector set.
2751 (define_insn "altivec_vcmpequ<VI_char>_p"
2752 [(set (reg:CC CR6_REGNO)
2753 (unspec:CC [(eq:CC (match_operand:VI2 1 "register_operand" "v")
2754 (match_operand:VI2 2 "register_operand" "v"))]
2756 (set (match_operand:VI2 0 "register_operand" "=v")
2757 (eq:VI2 (match_dup 1)
2760 "vcmpequ<VI_char>. %0,%1,%2"
2761 [(set_attr "type" "veccmpfx")])
;; 128-bit (V1TI) equality compare with predicate (vcmpequq.).
2763 (define_insn "altivec_vcmpequt_p"
2764 [(set (reg:CC CR6_REGNO)
2765 (unspec:CC [(eq:CC (match_operand:V1TI 1 "altivec_register_operand" "v")
2766 (match_operand:V1TI 2 "altivec_register_operand" "v"))]
2768 (set (match_operand:V1TI 0 "altivec_register_operand" "=v")
2769 (eq:V1TI (match_dup 1)
2772 "vcmpequq. %0,%1,%2"
2773 [(set_attr "type" "veccmpfx")])
2775 ;; Expand for builtin vcmpne{b,h,w}
;; No native vcmpne before P9: synthesized as vcmpeq into a scratch
;; (operand 3, allocated in the preparation code) followed by a NOT.
2776 (define_expand "altivec_vcmpne_<mode>"
2777 [(set (match_operand:VSX_EXTRACT_I 3 "altivec_register_operand" "=v")
2778 (eq:VSX_EXTRACT_I (match_operand:VSX_EXTRACT_I 1 "altivec_register_operand" "v")
2779 (match_operand:VSX_EXTRACT_I 2 "altivec_register_operand" "v")))
2780 (set (match_operand:VSX_EXTRACT_I 0 "altivec_register_operand" "=v")
2781 (not:VSX_EXTRACT_I (match_dup 3)))]
2784 operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
;; Signed / unsigned greater-than compares with predicate, element widths
;; via VI2 plus the V1TI quadword forms.
2787 (define_insn "*altivec_vcmpgts<VI_char>_p"
2788 [(set (reg:CC CR6_REGNO)
2789 (unspec:CC [(gt:CC (match_operand:VI2 1 "register_operand" "v")
2790 (match_operand:VI2 2 "register_operand" "v"))]
2792 (set (match_operand:VI2 0 "register_operand" "=v")
2793 (gt:VI2 (match_dup 1)
2796 "vcmpgts<VI_char>. %0,%1,%2"
2797 [(set_attr "type" "veccmpfx")])
2799 (define_insn "*altivec_vcmpgtst_p"
2800 [(set (reg:CC CR6_REGNO)
2801 (unspec:CC [(gt:CC (match_operand:V1TI 1 "register_operand" "v")
2802 (match_operand:V1TI 2 "register_operand" "v"))]
2804 (set (match_operand:V1TI 0 "register_operand" "=v")
2805 (gt:V1TI (match_dup 1)
2808 "vcmpgtsq. %0,%1,%2"
2809 [(set_attr "type" "veccmpfx")])
2811 (define_insn "*altivec_vcmpgtu<VI_char>_p"
2812 [(set (reg:CC CR6_REGNO)
2813 (unspec:CC [(gtu:CC (match_operand:VI2 1 "register_operand" "v")
2814 (match_operand:VI2 2 "register_operand" "v"))]
2816 (set (match_operand:VI2 0 "register_operand" "=v")
2817 (gtu:VI2 (match_dup 1)
2820 "vcmpgtu<VI_char>. %0,%1,%2"
2821 [(set_attr "type" "veccmpfx")])
2823 (define_insn "*altivec_vcmpgtut_p"
2824 [(set (reg:CC CR6_REGNO)
2825 (unspec:CC [(gtu:CC (match_operand:V1TI 1 "register_operand" "v")
2826 (match_operand:V1TI 2 "register_operand" "v"))]
2828 (set (match_operand:V1TI 0 "register_operand" "=v")
2829 (gtu:V1TI (match_dup 1)
2832 "vcmpgtuq. %0,%1,%2"
2833 [(set_attr "type" "veccmpfx")])
;; Floating-point record-form compares: same CR6-plus-mask structure as the
;; integer versions above, but typed "veccmp".  vcmpbfp is the bounds
;; compare, kept fully in unspec form.
2835 (define_insn "*altivec_vcmpeqfp_p"
2836 [(set (reg:CC CR6_REGNO)
2837 (unspec:CC [(eq:CC (match_operand:V4SF 1 "register_operand" "v")
2838 (match_operand:V4SF 2 "register_operand" "v"))]
2840 (set (match_operand:V4SF 0 "register_operand" "=v")
2841 (eq:V4SF (match_dup 1)
2843 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2844 "vcmpeqfp. %0,%1,%2"
2845 [(set_attr "type" "veccmp")])
2847 (define_insn "*altivec_vcmpgtfp_p"
2848 [(set (reg:CC CR6_REGNO)
2849 (unspec:CC [(gt:CC (match_operand:V4SF 1 "register_operand" "v")
2850 (match_operand:V4SF 2 "register_operand" "v"))]
2852 (set (match_operand:V4SF 0 "register_operand" "=v")
2853 (gt:V4SF (match_dup 1)
2855 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2856 "vcmpgtfp. %0,%1,%2"
2857 [(set_attr "type" "veccmp")])
2859 (define_insn "*altivec_vcmpgefp_p"
2860 [(set (reg:CC CR6_REGNO)
2861 (unspec:CC [(ge:CC (match_operand:V4SF 1 "register_operand" "v")
2862 (match_operand:V4SF 2 "register_operand" "v"))]
2864 (set (match_operand:V4SF 0 "register_operand" "=v")
2865 (ge:V4SF (match_dup 1)
2867 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2868 "vcmpgefp. %0,%1,%2"
2869 [(set_attr "type" "veccmp")])
2871 (define_insn "altivec_vcmpbfp_p"
2872 [(set (reg:CC CR6_REGNO)
2873 (unspec:CC [(match_operand:V4SF 1 "register_operand" "v")
2874 (match_operand:V4SF 2 "register_operand" "v")]
2876 (set (match_operand:V4SF 0 "register_operand" "=v")
2877 (unspec:V4SF [(match_dup 1)
2880 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)"
2882 [(set_attr "type" "veccmp")])
;; VSCR access and data-stream touch instructions.  mtvscr/mfvscr and the
;; dss* forms are unspec_volatile — they must not be moved or deleted.
;; The dst/dstt/dstst/dststt prefetch hints take a Pmode base register
;; (hence the "b" constraint and mode check in the condition).
2884 (define_insn "altivec_mtvscr"
2885 [(set (reg:SI VSCR_REGNO)
2887 [(match_operand:V4SI 0 "register_operand" "v")] UNSPECV_MTVSCR))]
2890 [(set_attr "type" "vecsimple")])
2892 (define_insn "altivec_mfvscr"
2893 [(set (match_operand:V8HI 0 "register_operand" "=v")
2894 (unspec_volatile:V8HI [(reg:SI VSCR_REGNO)] UNSPECV_MFVSCR))]
2897 [(set_attr "type" "vecsimple")])
;; Stop all data streams.
2899 (define_insn "altivec_dssall"
2900 [(unspec_volatile [(const_int 0)] UNSPECV_DSSALL)]
2903 [(set_attr "type" "vecsimple")])
;; Stop the data stream whose tag is operand 0.
2905 (define_insn "altivec_dss"
2906 [(unspec_volatile [(match_operand:QI 0 "immediate_operand" "i")]
2910 [(set_attr "type" "vecsimple")])
2912 (define_insn "altivec_dst"
2913 [(unspec [(match_operand 0 "register_operand" "b")
2914 (match_operand:SI 1 "register_operand" "r")
2915 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DST)]
2916 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2918 [(set_attr "type" "vecsimple")])
2920 (define_insn "altivec_dstt"
2921 [(unspec [(match_operand 0 "register_operand" "b")
2922 (match_operand:SI 1 "register_operand" "r")
2923 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTT)]
2924 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2926 [(set_attr "type" "vecsimple")])
2928 (define_insn "altivec_dstst"
2929 [(unspec [(match_operand 0 "register_operand" "b")
2930 (match_operand:SI 1 "register_operand" "r")
2931 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTST)]
2932 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2934 [(set_attr "type" "vecsimple")])
2936 (define_insn "altivec_dststt"
2937 [(unspec [(match_operand 0 "register_operand" "b")
2938 (match_operand:SI 1 "register_operand" "r")
2939 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTSTT)]
2940 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2942 [(set_attr "type" "vecsimple")])
;; Load-vector-for-shift (lvsl / lvsr) permute-control masks.  On big
;; endian the hardware mask is used directly; on little endian the raw
;; mask is post-processed with a vperm against the 0..15 identity series
;; so the shuffled result matches BE semantics.
2944 (define_expand "altivec_lvsl"
2945 [(use (match_operand:V16QI 0 "register_operand"))
2946 (use (match_operand:V16QI 1 "memory_operand"))]
2949 if (BYTES_BIG_ENDIAN)
2950 emit_insn (gen_altivec_lvsl_direct (operands[0], operands[1]))
;; NOTE(review): the semicolon/closing lines around the emit above are
;; elided in this excerpt (line-number gap 2950->2953).
2953 rtx mask, constv, vperm;
2954 mask = gen_reg_rtx (V16QImode);
2955 emit_insn (gen_altivec_lvsl_direct (mask, operands[1]));
2956 constv = gen_const_vec_series (V16QImode, const0_rtx, const1_rtx);
2957 constv = force_reg (V16QImode, constv);
2958 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2960 emit_insn (gen_rtx_SET (operands[0], vperm));
;; Register-address form used by the builtins (GPR holds the address).
2965 (define_insn "altivec_lvsl_reg_<mode>"
2966 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
2968 [(match_operand:GPR 1 "gpc_reg_operand" "b")]
2972 [(set_attr "type" "vecload")])
2974 (define_insn "altivec_lvsl_direct"
2975 [(set (match_operand:V16QI 0 "register_operand" "=v")
2976 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2980 [(set_attr "type" "vecload")])
;; lvsr: mirror image of lvsl, same BE-direct / LE-vperm split.
2982 (define_expand "altivec_lvsr"
2983 [(use (match_operand:V16QI 0 "altivec_register_operand"))
2984 (use (match_operand:V16QI 1 "memory_operand"))]
2987 if (BYTES_BIG_ENDIAN)
2988 emit_insn (gen_altivec_lvsr_direct (operands[0], operands[1]))
2991 rtx mask, constv, vperm;
2992 mask = gen_reg_rtx (V16QImode);
2993 emit_insn (gen_altivec_lvsr_direct (mask, operands[1]));
2994 constv = gen_const_vec_series (V16QImode, const0_rtx, const1_rtx);
2995 constv = force_reg (V16QImode, constv);
2996 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2998 emit_insn (gen_rtx_SET (operands[0], vperm));
3003 (define_insn "altivec_lvsr_reg_<mode>"
3004 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
3006 [(match_operand:GPR 1 "gpc_reg_operand" "b")]
3010 [(set_attr "type" "vecload")])
3012 (define_insn "altivec_lvsr_direct"
3013 [(set (match_operand:V16QI 0 "register_operand" "=v")
3014 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
3018 [(set_attr "type" "vecload")])
;; Realignment mask for vectorized unaligned loads: lvsr on the negated
;; address yields the permute mask the vectorizer wants.
3020 (define_expand "build_vector_mask_for_load"
3021 [(set (match_operand:V16QI 0 "register_operand")
3022 (unspec:V16QI [(match_operand 1 "memory_operand")] UNSPEC_LVSR))]
3028 gcc_assert (MEM_P (operands[1]));
3030 addr = XEXP (operands[1], 0);
3031 temp = gen_reg_rtx (GET_MODE (addr));
3032 emit_insn (gen_rtx_SET (temp, gen_rtx_NEG (GET_MODE (addr), addr)));
3033 emit_insn (gen_altivec_lvsr (operands[0],
3034 replace_equiv_address (operands[1], temp)));
3038 ;; Parallel some of the LVE* and STV*'s with unspecs because some have
3039 ;; identical rtl but different instructions-- and gcc gets confused.
;; Element loads/stores (lve*x / stve*x), LRU variants (lvxl / stvxl), and
;; the forced-lvx/stvx patterns used for prologue/epilogue VR save/restore.
;; The dummy (unspec ... UNSPEC_*) clauses only disambiguate otherwise
;; identical set patterns, as the comment above explains.
3041 (define_insn "altivec_lve<VI_char>x"
3043 [(set (match_operand:VI 0 "register_operand" "=v")
3044 (match_operand:VI 1 "memory_operand" "Z"))
3045 (unspec [(const_int 0)] UNSPEC_LVE)])]
3047 "lve<VI_char>x %0,%y1"
3048 [(set_attr "type" "vecload")])
3050 (define_insn "*altivec_lvesfx"
3052 [(set (match_operand:V4SF 0 "register_operand" "=v")
3053 (match_operand:V4SF 1 "memory_operand" "Z"))
3054 (unspec [(const_int 0)] UNSPEC_LVE)])]
3057 [(set_attr "type" "vecload")])
3059 (define_insn "altivec_lvxl_<mode>"
3061 [(set (match_operand:VM2 0 "register_operand" "=v")
3062 (match_operand:VM2 1 "memory_operand" "Z"))
3063 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
3066 [(set_attr "type" "vecload")])
3068 ; This version of lvx is used only in cases where we need to force an lvx
3069 ; over any other load, and we don't care about losing CSE opportunities.
3070 ; Its primary use is for prologue register saves.
3071 (define_insn "altivec_lvx_<mode>_internal"
3073 [(set (match_operand:VM2 0 "register_operand" "=v")
3074 (match_operand:VM2 1 "memory_operand" "Z"))
3075 (unspec [(const_int 0)] UNSPEC_LVX)])]
3078 [(set_attr "type" "vecload")])
3080 ; The following patterns embody what lvx should usually look like.
;; Expander splits the address into reg+reg (2op) or single-reg (1op)
;; forms, picking the DImode or SImode pattern by pointer size.
3081 (define_expand "altivec_lvx_<VM2:mode>"
3082 [(set (match_operand:VM2 0 "register_operand")
3083 (match_operand:VM2 1 "altivec_indexed_or_indirect_operand"))]
3086 rtx addr = XEXP (operand1, 0);
3087 if (rs6000_sum_of_two_registers_p (addr))
3089 rtx op1 = XEXP (addr, 0);
3090 rtx op2 = XEXP (addr, 1);
3092 emit_insn (gen_altivec_lvx_<VM2:mode>_2op_di (operand0, op1, op2));
3094 emit_insn (gen_altivec_lvx_<VM2:mode>_2op_si (operand0, op1, op2));
3099 emit_insn (gen_altivec_lvx_<VM2:mode>_1op_di (operand0, addr));
3101 emit_insn (gen_altivec_lvx_<VM2:mode>_1op_si (operand0, addr));
3106 ; The next two patterns embody what lvx should usually look like.
;; The (and ... ) in the address models lvx's implicit 16-byte alignment
;; masking of the effective address.
3107 (define_insn "altivec_lvx_<VM2:mode>_2op_<P:mptrsize>"
3108 [(set (match_operand:VM2 0 "register_operand" "=v")
3109 (mem:VM2 (and:P (plus:P (match_operand:P 1 "register_operand" "b")
3110 (match_operand:P 2 "register_operand" "r"))
3114 [(set_attr "type" "vecload")])
3116 (define_insn "altivec_lvx_<VM2:mode>_1op_<P:mptrsize>"
3117 [(set (match_operand:VM2 0 "register_operand" "=v")
3118 (mem:VM2 (and:P (match_operand:P 1 "register_operand" "r")
3122 [(set_attr "type" "vecload")])
3124 ; This version of stvx is used only in cases where we need to force an stvx
3125 ; over any other store, and we don't care about losing CSE opportunities.
3126 ; Its primary use is for epilogue register restores.
3127 (define_insn "altivec_stvx_<mode>_internal"
3129 [(set (match_operand:VM2 0 "memory_operand" "=Z")
3130 (match_operand:VM2 1 "register_operand" "v"))
3131 (unspec [(const_int 0)] UNSPEC_STVX)])]
3134 [(set_attr "type" "vecstore")])
3136 ; The following patterns embody what stvx should usually look like.
;; Store-side mirror of the lvx expander above.
3137 (define_expand "altivec_stvx_<VM2:mode>"
3138 [(set (match_operand:VM2 1 "altivec_indexed_or_indirect_operand")
3139 (match_operand:VM2 0 "register_operand"))]
3142 rtx addr = XEXP (operand1, 0);
3143 if (rs6000_sum_of_two_registers_p (addr))
3145 rtx op1 = XEXP (addr, 0);
3146 rtx op2 = XEXP (addr, 1);
3148 emit_insn (gen_altivec_stvx_<VM2:mode>_2op_di (operand0, op1, op2));
3150 emit_insn (gen_altivec_stvx_<VM2:mode>_2op_si (operand0, op1, op2));
3155 emit_insn (gen_altivec_stvx_<VM2:mode>_1op_di (operand0, addr));
3157 emit_insn (gen_altivec_stvx_<VM2:mode>_1op_si (operand0, addr));
3162 ; The next two patterns embody what stvx should usually look like.
3163 (define_insn "altivec_stvx_<VM2:mode>_2op_<P:mptrsize>"
3164 [(set (mem:VM2 (and:P (plus:P (match_operand:P 1 "register_operand" "b")
3165 (match_operand:P 2 "register_operand" "r"))
3167 (match_operand:VM2 0 "register_operand" "v"))]
3170 [(set_attr "type" "vecstore")])
3172 (define_insn "altivec_stvx_<VM2:mode>_1op_<P:mptrsize>"
3173 [(set (mem:VM2 (and:P (match_operand:P 1 "register_operand" "r")
3175 (match_operand:VM2 0 "register_operand" "v"))]
3178 [(set_attr "type" "vecstore")])
3180 (define_insn "altivec_stvxl_<mode>"
3182 [(set (match_operand:VM2 0 "memory_operand" "=Z")
3183 (match_operand:VM2 1 "register_operand" "v"))
3184 (unspec [(const_int 0)] UNSPEC_STVXL)])]
3187 [(set_attr "type" "vecstore")])
;; Store one element (<VI_scalar>) of the vector to memory.
3189 (define_insn "altivec_stve<VI_char>x"
3190 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
3191 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
3193 "stve<VI_char>x %1,%y0"
3194 [(set_attr "type" "vecstore")])
3196 (define_insn "*altivec_stvesfx"
3197 [(set (match_operand:SF 0 "memory_operand" "=Z")
3198 (unspec:SF [(match_operand:V4SF 1 "register_operand" "v")] UNSPEC_STVE))]
3201 [(set_attr "type" "vecstore")])
3204 ;; signed int/float to double convert words 0 and 2
3205 (define_expand "doublee<mode>2"
3206 [(set (match_operand:V2DF 0 "register_operand" "=v")
3207 (match_operand:VSX_W 1 "register_operand" "v"))]
3210 machine_mode op_mode = GET_MODE (operands[1]);
3212 if (BYTES_BIG_ENDIAN)
3214 /* Big endian word numbering for words in operand is 0 1 2 3.
3215 Input words 0 and 2 are where they need to be. */
3216 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], operands[1]));
3220 /* Little endian word numbering for operand is 3 2 1 0.
3221 take (operand[1] operand[1]) and shift left one word
3222 3 2 1 0 3 2 1 0 => 2 1 0 3
3223 Input words 2 and 0 are now where they need to be for the
3226 rtx rtx_val = GEN_INT (1);
3228 rtx_tmp = gen_reg_rtx (op_mode);
3229 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3230 operands[1], rtx_val));
3231 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3235 [(set_attr "type" "veccomplex")])
3237 ;; Generate unsdoublee
3238 ;; unsigned int to double convert words 0 and 2
3239 (define_expand "unsdoubleev4si2"
3240 [(set (match_operand:V2DF 0 "register_operand" "=v")
3241 (match_operand:V4SI 1 "register_operand" "v"))]
3244 if (BYTES_BIG_ENDIAN)
3246 /* Big endian word numbering for words in operand is 0 1 2 3.
3247 Input words 0 and 2 are where they need to be. */
3248 emit_insn (gen_vsx_xvcvuxwdp (operands[0], operands[1]));
3252 /* Little endian word numbering for operand is 3 2 1 0.
3253 take (operand[1] operand[1]) and shift left one word
3254 3 2 1 0 3 2 1 0 => 2 1 0 3
3255 Input words 2 and 0 are now where they need to be for the
3258 rtx rtx_val = GEN_INT (1);
3260 rtx_tmp = gen_reg_rtx (V4SImode);
3261 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3262 operands[1], rtx_val));
3263 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3267 [(set_attr "type" "veccomplex")])
3269 ;; Generate doubleov
3270 ;; signed int/float to double convert words 1 and 3
3271 (define_expand "doubleo<mode>2"
3272 [(set (match_operand:V2DF 0 "register_operand" "=v")
3273 (match_operand:VSX_W 1 "register_operand" "v"))]
3276 machine_mode op_mode = GET_MODE (operands[1]);
3278 if (BYTES_BIG_ENDIAN)
3280 /* Big endian word numbering for words in operand is 0 1 2 3.
3281 take (operand[1] operand[1]) and shift left one word
3282 0 1 2 3 0 1 2 3 => 1 2 3 0
3283 Input words 1 and 3 are now where they need to be for the
3286 rtx rtx_val = GEN_INT (1);
3288 rtx_tmp = gen_reg_rtx (op_mode);
3289 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3290 operands[1], rtx_val));
3291 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3295 /* Little endian word numbering for operand is 3 2 1 0.
3296 Input words 3 and 1 are where they need to be. */
3297 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], operands[1]));
3301 [(set_attr "type" "veccomplex")])
3303 ;; Generate unsdoubleov
3304 ;; unsigned int to double convert words 1 and 3
3305 (define_expand "unsdoubleov4si2"
3306 [(set (match_operand:V2DF 0 "register_operand" "=v")
3307 (match_operand:V4SI 1 "register_operand" "v"))]
3310 if (BYTES_BIG_ENDIAN)
3312 /* Big endian word numbering for words in operand is 0 1 2 3.
3313 take (operand[1] operand[1]) and shift left one word
3314 0 1 2 3 0 1 2 3 => 1 2 3 0
3315 Input words 1 and 3 are now where they need to be for the
3318 rtx rtx_val = GEN_INT (1);
3320 rtx_tmp = gen_reg_rtx (V4SImode);
3321 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3322 operands[1], rtx_val));
3323 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3327 /* Want to convert the words 1 and 3.
3328 Little endian word numbering for operand is 3 2 1 0.
3329 Input words 3 and 1 are where they need to be. */
3330 emit_insn (gen_vsx_xvcvuxwdp (operands[0], operands[1]));
3334 [(set_attr "type" "veccomplex")])
3336 ;; Generate doublehv
3337 ;; signed int/float to double convert words 0 and 1
;; Words are repositioned with xxsldwi shifts so the hardware convert
;; instruction (which converts the even/odd words) sees words 0 and 1.
3338 (define_expand "doubleh<mode>2"
3339 [(set (match_operand:V2DF 0 "register_operand" "=v")
3340 (match_operand:VSX_W 1 "register_operand" "v"))]
3346 machine_mode op_mode = GET_MODE (operands[1]);
3347 rtx_tmp = gen_reg_rtx (op_mode);
3349 if (BYTES_BIG_ENDIAN)
3351 /* Big endian word numbering for words in operand is 0 1 2 3.
3352 Shift operand left one word, rtx_tmp word order is now 1 2 3 0.
3353 take (rtx_tmp operand[1]) and shift left three words
3354 1 2 3 0 0 1 2 3 => 0 0 1 2
3355 Input words 0 and 1 are now where they need to be for the
3357 rtx_val = GEN_INT (1);
3358 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3359 operands[1], rtx_val));
3361 rtx_val = GEN_INT (3);
3362 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, rtx_tmp,
3363 operands[1], rtx_val));
3364 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3368 /* Little endian word numbering for operand is 3 2 1 0.
3369 Shift operand left three words, rtx_tmp word order is now 0 3 2 1.
3370 take (operand[1] rtx_tmp) and shift left two words
3371 3 2 1 0 0 3 2 1 => 1 0 0 3
3372 Input words 0 and 1 are now where they need to be for the
3374 rtx_val = GEN_INT (3);
3375 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3376 operands[1], rtx_val));
3378 rtx_val = GEN_INT (2);
3379 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3381 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3385 [(set_attr "type" "veccomplex")])
3387 ;; Generate unsdoublehv
3388 ;; unsigned int to double convert words 0 and 1
;; Same word-shuffling scheme as doubleh<mode>2 above, but using the
;; unsigned-word convert xvcvuxwdp.
3389 (define_expand "unsdoublehv4si2"
3390 [(set (match_operand:V2DF 0 "register_operand" "=v")
3391 (match_operand:V4SI 1 "register_operand" "v"))]
3394 rtx rtx_tmp = gen_reg_rtx (V4SImode);
3395 rtx rtx_val = GEN_INT (12);
3397 if (BYTES_BIG_ENDIAN)
3399 /* Big endian word numbering for words in operand is 0 1 2 3.
3400 Shift operand left one word, rtx_tmp word order is now 1 2 3 0.
3401 take (rtx_tmp operand[1]) and shift left three words
3402 1 2 3 0 0 1 2 3 => 0 0 1 2
3403 Input words 0 and 1 are now where they need to be for the
3405 rtx_val = GEN_INT (1);
3406 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3407 operands[1], rtx_val));
3409 rtx_val = GEN_INT (3);
3410 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, rtx_tmp,
3411 operands[1], rtx_val));
3412 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3416 /* Little endian word numbering for operand is 3 2 1 0.
3417 Shift operand left three words, rtx_tmp word order is now 0 3 2 1.
3418 take (operand[1] rtx_tmp) and shift left two words
3419 3 2 1 0 0 3 2 1 => 1 0 0 3
3420 Input words 1 and 0 are now where they need to be for the
3422 rtx_val = GEN_INT (3);
3424 rtx_tmp = gen_reg_rtx (V4SImode);
3425 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3426 operands[1], rtx_val));
3428 rtx_val = GEN_INT (2);
3429 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3431 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3435 [(set_attr "type" "veccomplex")])
3437 ;; Generate doublelv
3438 ;; signed int/float to double convert words 2 and 3
;; Mirror of doubleh<mode>2: words 2 and 3 are shuffled into the
;; positions the convert instruction reads.
3439 (define_expand "doublel<mode>2"
3440 [(set (match_operand:V2DF 0 "register_operand" "=v")
3441 (match_operand:VSX_W 1 "register_operand" "v"))]
3445 rtx rtx_val = GEN_INT (3);
3447 machine_mode op_mode = GET_MODE (operands[1]);
3448 rtx_tmp = gen_reg_rtx (op_mode);
3450 if (BYTES_BIG_ENDIAN)
3452 /* Big endian word numbering for operand is 0 1 2 3.
3453 Shift operand left three words, rtx_tmp word order is now 3 0 1 2.
3454 take (operand[1] rtx_tmp) and shift left two words
3455 0 1 2 3 3 0 1 2 => 2 3 3 0
3456 now use convert instruction to convert word 2 and 3 in the
3458 rtx_val = GEN_INT (3);
3459 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3460 operands[1], rtx_val));
3462 rtx_val = GEN_INT (2);
3463 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3465 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3469 /* Little endian word numbering for operand is 3 2 1 0.
3470 Shift operand left one word, rtx_tmp word order is now 2 1 0 3.
3471 take (rtx_tmp operand[1]) and shift left three words
3472 2 1 0 3 3 2 1 0 => 3 3 2 1
3473 now use convert instruction to convert word 3 and 2 in the
3475 rtx_val = GEN_INT (1);
3476 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3477 operands[1], rtx_val));
3479 rtx_val = GEN_INT (3);
3480 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, rtx_tmp,
3481 operands[1], rtx_val));
3482 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3486 [(set_attr "type" "veccomplex")])
3488 ;; Generate unsdoublelv
3489 ;; unsigned int to double convert words 2 and 3
;; Same word-shuffling scheme as doublel<mode>2 above, but using the
;; unsigned-word convert xvcvuxwdp.
3490 (define_expand "unsdoublelv4si2"
3491 [(set (match_operand:V2DF 0 "register_operand" "=v")
3492 (match_operand:V4SI 1 "register_operand" "v"))]
3495 rtx rtx_tmp = gen_reg_rtx (V4SImode);
3496 rtx rtx_val = GEN_INT (12);
3498 if (BYTES_BIG_ENDIAN)
3500 /* Big endian word numbering for operand is 0 1 2 3.
3501 Shift operand left three words, rtx_tmp word order is now 3 0 1 2.
3502 take (operand[1] rtx_tmp) and shift left two words
3503 0 1 2 3 3 0 1 2 => 2 3 3 0
3504 now use convert instruction to convert word 2 and 3 in the
3506 rtx_val = GEN_INT (3);
3507 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3508 operands[1], rtx_val));
3510 rtx_val = GEN_INT (2);
3511 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3513 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3517 /* Little endian word numbering for operand is 3 2 1 0.
3518 Shift operand left one word, rtx_tmp word order is now 2 1 0 3.
3519 take (rtx_tmp operand[1]) and shift left three words
3520 2 1 0 3 3 2 1 0 => 3 3 2 1
3521 now use convert instruction to convert word 3 and 2 in the
3523 rtx_val = GEN_INT (1);
3524 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp,
3525 operands[1], operands[1], rtx_val));
3527 rtx_val = GEN_INT (3);
3528 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, rtx_tmp,
3529 operands[1], rtx_val));
3530 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3534 [(set_attr "type" "veccomplex")])
3536 ;; Generate two vector F32 converted to packed vector I16 vector
;; Each V4SF input is first converted to saturated unsigned words
;; (vctuxs), then the two word vectors are packed to halfwords with
;; signed saturation (vpkswss).
3537 (define_expand "convert_4f32_8i16"
3538 [(set (match_operand:V8HI 0 "register_operand" "=v")
3539 (unspec:V8HI [(match_operand:V4SF 1 "register_operand" "v")
3540 (match_operand:V4SF 2 "register_operand" "v")]
3541 UNSPEC_CONVERT_4F32_8I16))]
3544 rtx rtx_tmp_hi = gen_reg_rtx (V4SImode);
3545 rtx rtx_tmp_lo = gen_reg_rtx (V4SImode);
3547 emit_insn (gen_altivec_vctuxs (rtx_tmp_hi, operands[1], const0_rtx));
3548 emit_insn (gen_altivec_vctuxs (rtx_tmp_lo, operands[2], const0_rtx));
3549 emit_insn (gen_altivec_vpkswss (operands[0], rtx_tmp_hi, rtx_tmp_lo));
3554 ;; Convert two vector F32 to packed vector F16.
3555 ;; This builtin packs 32-bit floating-point values into a packed
3556 ;; 16-bit floating point values (stored in 16bit integer type).
3557 ;; (vector unsigned short r = vec_pack_to_short_fp32 (a, b);
3558 ;; The expected codegen for this builtin is
3561 ;; if (little endian)
;; The vpkuwum operand order is swapped for little endian so the result
;; element order matches the big-endian layout of the builtin.
3566 (define_expand "convert_4f32_8f16"
3567 [(set (match_operand:V8HI 0 "register_operand" "=v")
3568 (unspec:V8HI [(match_operand:V4SF 1 "register_operand" "v")
3569 (match_operand:V4SF 2 "register_operand" "v")]
3570 UNSPEC_CONVERT_4F32_8F16))]
3573 rtx rtx_tmp_hi = gen_reg_rtx (V4SImode);
3574 rtx rtx_tmp_lo = gen_reg_rtx (V4SImode);
3576 emit_insn (gen_vsx_xvcvsphp (rtx_tmp_hi, operands[1]));
3577 emit_insn (gen_vsx_xvcvsphp (rtx_tmp_lo, operands[2]));
3578 if (!BYTES_BIG_ENDIAN)
3579 emit_insn (gen_altivec_vpkuwum (operands[0], rtx_tmp_hi, rtx_tmp_lo));
3581 emit_insn (gen_altivec_vpkuwum (operands[0], rtx_tmp_lo, rtx_tmp_hi));
;; Vector absolute value: 0 - x (modular subtract), then smax (x, -x).
3587 ;; xxlxor/vxor SCRATCH0,SCRATCH0,SCRATCH0
3588 ;; vsubu?m SCRATCH2,SCRATCH1,%1
3589 ;; vmaxs? %0,%1,SCRATCH2"
3590 (define_expand "abs<mode>2"
3591 [(set (match_dup 2) (match_dup 3))
3593 (minus:VI2 (match_dup 2)
3594 (match_operand:VI2 1 "register_operand" "v")))
3595 (set (match_operand:VI2 0 "register_operand" "=v")
3596 (smax:VI2 (match_dup 1) (match_dup 4)))]
3599 operands[2] = gen_reg_rtx (<MODE>mode);
3600 operands[3] = CONST0_RTX (<MODE>mode);
3601 operands[4] = gen_reg_rtx (<MODE>mode);
;; Vector negated absolute value: 0 - x, then smin (x, -x).
3605 ;; vspltisw SCRATCH1,0
3606 ;; vsubu?m SCRATCH2,SCRATCH1,%1
3607 ;; vmins? %0,%1,SCRATCH2"
3608 (define_expand "nabs<mode>2"
3609 [(set (match_dup 2) (match_dup 3))
3611 (minus:VI2 (match_dup 2)
3612 (match_operand:VI2 1 "register_operand" "v")))
3613 (set (match_operand:VI2 0 "register_operand" "=v")
3614 (smin:VI2 (match_dup 1) (match_dup 4)))]
3617 operands[2] = gen_reg_rtx (<MODE>mode);
3618 operands[3] = CONST0_RTX (<MODE>mode);
3619 operands[4] = gen_reg_rtx (<MODE>mode);
;; V4SF absolute value: build the sign-bit mask 0x80000000 in each word
;; (all-ones shifted left by 31) and clear it with vandc.
3623 ;; vspltisw SCRATCH1,-1
3624 ;; vslw SCRATCH2,SCRATCH1,SCRATCH1
3625 ;; vandc %0,%1,SCRATCH2
3626 (define_expand "altivec_absv4sf2"
3628 (vec_duplicate:V4SI (const_int -1)))
3630 (ashift:V4SI (match_dup 2) (match_dup 2)))
3631 (set (match_operand:V4SF 0 "register_operand" "=v")
3632 (and:V4SF (not:V4SF (subreg:V4SF (match_dup 3) 0))
3633 (match_operand:V4SF 1 "register_operand" "v")))]
3636 operands[2] = gen_reg_rtx (V4SImode);
3637 operands[3] = gen_reg_rtx (V4SImode);
;; Saturating absolute value; the ss_minus also sets VSCR saturation.
3641 ;; vspltis? SCRATCH0,0
3642 ;; vsubs?s SCRATCH2,SCRATCH1,%1
3643 ;; vmaxs? %0,%1,SCRATCH2"
3644 (define_expand "altivec_abss_<mode>"
3645 [(set (match_dup 2) (vec_duplicate:VI (const_int 0)))
3646 (parallel [(set (match_dup 3)
3647 (ss_minus:VI (match_dup 2)
3648 (match_operand:VI 1 "register_operand" "v")))
3649 (set (reg:SI VSCR_REGNO)
3650 (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))])
3651 (set (match_operand:VI 0 "register_operand" "=v")
3652 (smax:VI (match_dup 1) (match_dup 3)))]
3655 operands[2] = gen_reg_rtx (GET_MODE (operands[0]));
3656 operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
;; Sum-across reduction of a short-element vector to a scalar:
;; vsum4s?s partial sums per word, vsumsws sums across, then the
;; result word is extracted (element index depends on endianness).
3659 (define_expand "reduc_plus_scal_<mode>"
3660 [(set (match_operand:<VI_scalar> 0 "register_operand" "=v")
3661 (unspec:VIshort [(match_operand:VIshort 1 "register_operand" "v")]
3662 UNSPEC_REDUC_PLUS))]
3665 rtx vzero = gen_reg_rtx (V4SImode);
3666 rtx vtmp1 = gen_reg_rtx (V4SImode);
3667 rtx vtmp2 = gen_reg_rtx (<MODE>mode);
3668 rtx dest = gen_lowpart (V4SImode, vtmp2);
3669 int elt = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (<MODE>mode) - 1 : 0;
3671 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
3672 emit_insn (gen_altivec_vsum4s<VI_char>s (vtmp1, operands[1], vzero));
3673 emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
3674 rs6000_expand_vector_extract (operands[0], vtmp2, GEN_INT (elt));
;; ISA 3.0 single-instruction vector negate.
3678 (define_insn "*p9_neg<mode>2"
3679 [(set (match_operand:VNEG 0 "altivec_register_operand" "=v")
3680 (neg:VNEG (match_operand:VNEG 1 "altivec_register_operand" "v")))]
3682 "vneg<VI_char> %0,%1"
3683 [(set_attr "type" "vecsimple")])
;; Generic vector negate; falls back to 0 - x when the single
;; vneg[wd] instruction is not available for this mode/ISA.
3685 (define_expand "neg<mode>2"
3686 [(set (match_operand:VI2 0 "register_operand")
3687 (neg:VI2 (match_operand:VI2 1 "register_operand")))]
3690 if (!TARGET_P9_VECTOR || (<MODE>mode != V4SImode && <MODE>mode != V2DImode))
3694 vzero = gen_reg_rtx (GET_MODE (operands[0]));
3695 emit_move_insn (vzero, CONST0_RTX (<MODE>mode));
3696 emit_insn (gen_sub<mode>3 (operands[0], vzero, operands[1]));
;; Unsigned dot product accumulating into a V4SI via vmsumu?m.
3701 (define_expand "udot_prod<mode>"
3702 [(set (match_operand:V4SI 0 "register_operand" "=v")
3703 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
3704 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
3705 (match_operand:VIshort 2 "register_operand" "v")]
3709 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], operands[2], operands[3]));
;; Signed halfword dot product accumulating into a V4SI via vmsumshm.
3713 (define_expand "sdot_prodv8hi"
3714 [(set (match_operand:V4SI 0 "register_operand" "=v")
3715 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
3716 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3717 (match_operand:V8HI 2 "register_operand" "v")]
3721 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], operands[2], operands[3]))
;; Widening sums: multiply by a splatted vector of ones and use the
;; multiply-sum instructions to widen and accumulate in one step.
3725 (define_expand "widen_usum<mode>3"
3726 [(set (match_operand:V4SI 0 "register_operand" "=v")
3727 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
3728 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")]
3732 rtx vones = gen_reg_rtx (GET_MODE (operands[1]));
3734 emit_insn (gen_altivec_vspltis<VI_char> (vones, const1_rtx));
3735 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], vones, operands[2]));
;; Signed widening sum of bytes (vmsummbm with a ones vector).
3739 (define_expand "widen_ssumv16qi3"
3740 [(set (match_operand:V4SI 0 "register_operand" "=v")
3741 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
3742 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")]
3746 rtx vones = gen_reg_rtx (V16QImode);
3748 emit_insn (gen_altivec_vspltisb (vones, const1_rtx));
3749 emit_insn (gen_altivec_vmsummbm (operands[0], operands[1], vones, operands[2]));
;; Signed widening sum of halfwords (vmsumshm with a ones vector).
3753 (define_expand "widen_ssumv8hi3"
3754 [(set (match_operand:V4SI 0 "register_operand" "=v")
3755 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
3756 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
3760 rtx vones = gen_reg_rtx (V8HImode);
3762 emit_insn (gen_altivec_vspltish (vones, const1_rtx));
3763 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], vones, operands[2]));
;; Signed unpack (sign-extend) of the high/low half of a vector; the
;; _DIRECT unspecs map straight onto the vupkh/vupkl instructions.
3767 (define_expand "vec_unpacks_hi_<VP_small_lc>"
3768 [(set (match_operand:VP 0 "register_operand" "=v")
3769 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3770 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
3774 (define_expand "vec_unpacks_lo_<VP_small_lc>"
3775 [(set (match_operand:VP 0 "register_operand" "=v")
3776 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3777 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
;; vperm variants whose input and output modes differ; the ?wa
;; alternative allows the P9 xxperm form.
3781 (define_insn "vperm_v8hiv4si"
3782 [(set (match_operand:V4SI 0 "register_operand" "=?wa,v")
3783 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "wa,v")
3784 (match_operand:V4SI 2 "register_operand" "0,v")
3785 (match_operand:V16QI 3 "register_operand" "wa,v")]
3791 [(set_attr "type" "vecperm")
3792 (set_attr "isa" "p9v,*")])
3794 (define_insn "vperm_v16qiv8hi"
3795 [(set (match_operand:V8HI 0 "register_operand" "=?wa,v")
3796 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "wa,v")
3797 (match_operand:V8HI 2 "register_operand" "0,v")
3798 (match_operand:V16QI 3 "register_operand" "wa,v")]
3804 [(set_attr "type" "vecperm")
3805 (set_attr "isa" "p9v,*")])
;; Unsigned unpack (zero-extend) of the high/low half: merge the input
;; with a zero vector; the merge direction and operand order depend on
;; endianness.
3807 (define_expand "vec_unpacku_hi_<VP_small_lc>"
3808 [(set (match_operand:VP 0 "register_operand" "=v")
3809 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3813 rtx vzero = gen_reg_rtx (<VP_small>mode);
3814 emit_insn (gen_altivec_vspltis<VU_char> (vzero, const0_rtx));
3816 rtx res = gen_reg_rtx (<VP_small>mode);
3817 rtx op1 = operands[1];
3819 if (BYTES_BIG_ENDIAN)
3820 emit_insn (gen_altivec_vmrgh<VU_char> (res, vzero, op1));
3822 emit_insn (gen_altivec_vmrgl<VU_char> (res, op1, vzero));
3824 emit_insn (gen_move_insn (operands[0], gen_lowpart (<MODE>mode, res)));
3828 (define_expand "vec_unpacku_lo_<VP_small_lc>"
3829 [(set (match_operand:VP 0 "register_operand" "=v")
3830 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3834 rtx vzero = gen_reg_rtx (<VP_small>mode);
3835 emit_insn (gen_altivec_vspltis<VU_char> (vzero, const0_rtx));
3837 rtx res = gen_reg_rtx (<VP_small>mode);
3838 rtx op1 = operands[1];
3840 if (BYTES_BIG_ENDIAN)
3841 emit_insn (gen_altivec_vmrgl<VU_char> (res, vzero, op1));
3843 emit_insn (gen_altivec_vmrgh<VU_char> (res, op1, vzero));
3845 emit_insn (gen_move_insn (operands[0], gen_lowpart (<MODE>mode, res)));
;; Widening unsigned byte multiply, high/low halves: multiply even and
;; odd elements, then interleave with vmrgh/vmrgl.  Even/odd and
;; merge-high/low swap roles under little endian.
3849 (define_expand "vec_widen_umult_hi_v16qi"
3850 [(set (match_operand:V8HI 0 "register_operand" "=v")
3851 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3852 (match_operand:V16QI 2 "register_operand" "v")]
3856 rtx ve = gen_reg_rtx (V8HImode);
3857 rtx vo = gen_reg_rtx (V8HImode);
3859 if (BYTES_BIG_ENDIAN)
3861 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3862 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3863 emit_insn (gen_altivec_vmrghh (operands[0], ve, vo));
3867 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3868 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3869 emit_insn (gen_altivec_vmrglh (operands[0], ve, vo));
3874 (define_expand "vec_widen_umult_lo_v16qi"
3875 [(set (match_operand:V8HI 0 "register_operand" "=v")
3876 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3877 (match_operand:V16QI 2 "register_operand" "v")]
3881 rtx ve = gen_reg_rtx (V8HImode);
3882 rtx vo = gen_reg_rtx (V8HImode);
3884 if (BYTES_BIG_ENDIAN)
3886 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3887 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3888 emit_insn (gen_altivec_vmrglh (operands[0], ve, vo));
3892 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3893 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3894 emit_insn (gen_altivec_vmrghh (operands[0], ve, vo));
;; Widening signed byte multiply, high/low halves; same even/odd
;; multiply-and-merge scheme as the unsigned versions above.
3899 (define_expand "vec_widen_smult_hi_v16qi"
3900 [(set (match_operand:V8HI 0 "register_operand" "=v")
3901 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3902 (match_operand:V16QI 2 "register_operand" "v")]
3906 rtx ve = gen_reg_rtx (V8HImode);
3907 rtx vo = gen_reg_rtx (V8HImode);
3909 if (BYTES_BIG_ENDIAN)
3911 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3912 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3913 emit_insn (gen_altivec_vmrghh (operands[0], ve, vo));
3917 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3918 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3919 emit_insn (gen_altivec_vmrglh (operands[0], ve, vo));
3924 (define_expand "vec_widen_smult_lo_v16qi"
3925 [(set (match_operand:V8HI 0 "register_operand" "=v")
3926 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3927 (match_operand:V16QI 2 "register_operand" "v")]
3931 rtx ve = gen_reg_rtx (V8HImode);
3932 rtx vo = gen_reg_rtx (V8HImode);
3934 if (BYTES_BIG_ENDIAN)
3936 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3937 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3938 emit_insn (gen_altivec_vmrglh (operands[0], ve, vo));
3942 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3943 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3944 emit_insn (gen_altivec_vmrghh (operands[0], ve, vo));
;; Widening unsigned halfword multiply, high/low halves; same even/odd
;; multiply-and-merge scheme, producing V4SI results.
3949 (define_expand "vec_widen_umult_hi_v8hi"
3950 [(set (match_operand:V4SI 0 "register_operand" "=v")
3951 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3952 (match_operand:V8HI 2 "register_operand" "v")]
3956 rtx ve = gen_reg_rtx (V4SImode);
3957 rtx vo = gen_reg_rtx (V4SImode);
3959 if (BYTES_BIG_ENDIAN)
3961 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3962 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3963 emit_insn (gen_altivec_vmrghw (operands[0], ve, vo));
3967 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3968 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3969 emit_insn (gen_altivec_vmrglw (operands[0], ve, vo));
3974 (define_expand "vec_widen_umult_lo_v8hi"
3975 [(set (match_operand:V4SI 0 "register_operand" "=v")
3976 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3977 (match_operand:V8HI 2 "register_operand" "v")]
3981 rtx ve = gen_reg_rtx (V4SImode);
3982 rtx vo = gen_reg_rtx (V4SImode);
3984 if (BYTES_BIG_ENDIAN)
3986 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3987 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3988 emit_insn (gen_altivec_vmrglw (operands[0], ve, vo));
3992 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3993 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3994 emit_insn (gen_altivec_vmrghw (operands[0], ve, vo));
;; Widening signed halfword multiply, high/low halves; same even/odd
;; multiply-and-merge scheme, producing V4SI results.
3999 (define_expand "vec_widen_smult_hi_v8hi"
4000 [(set (match_operand:V4SI 0 "register_operand" "=v")
4001 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
4002 (match_operand:V8HI 2 "register_operand" "v")]
4006 rtx ve = gen_reg_rtx (V4SImode);
4007 rtx vo = gen_reg_rtx (V4SImode);
4009 if (BYTES_BIG_ENDIAN)
4011 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
4012 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
4013 emit_insn (gen_altivec_vmrghw (operands[0], ve, vo));
4017 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
4018 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
4019 emit_insn (gen_altivec_vmrglw (operands[0], ve, vo));
4024 (define_expand "vec_widen_smult_lo_v8hi"
4025 [(set (match_operand:V4SI 0 "register_operand" "=v")
4026 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
4027 (match_operand:V8HI 2 "register_operand" "v")]
4031 rtx ve = gen_reg_rtx (V4SImode);
4032 rtx vo = gen_reg_rtx (V4SImode);
4034 if (BYTES_BIG_ENDIAN)
4036 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
4037 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
4038 emit_insn (gen_altivec_vmrglw (operands[0], ve, vo));
4042 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
4043 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
4044 emit_insn (gen_altivec_vmrghw (operands[0], ve, vo));
;; Pack two wide vectors into one narrow vector with modulo
;; (truncating) semantics.
4049 (define_expand "vec_pack_trunc_<mode>"
4050 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
4051 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
4052 (match_operand:VP 2 "register_operand" "v")]
4053 UNSPEC_VPACK_UNS_UNS_MOD))]
;; V16QI multiply: widen via even/odd signed multiplies, then build a
;; permute mask selecting the low byte of each halfword product.
4057 (define_expand "mulv16qi3"
4058 [(set (match_operand:V16QI 0 "register_operand" "=v")
4059 (mult:V16QI (match_operand:V16QI 1 "register_operand" "v")
4060 (match_operand:V16QI 2 "register_operand" "v")))]
4063 rtx even = gen_reg_rtx (V8HImode);
4064 rtx odd = gen_reg_rtx (V8HImode);
4065 rtx mask = gen_reg_rtx (V16QImode);
4066 rtvec v = rtvec_alloc (16);
4069 for (i = 0; i < 8; ++i) {
4070 RTVEC_ELT (v, 2 * i)
4071 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 1 : 31 - 2 * i);
4072 RTVEC_ELT (v, 2 * i + 1)
4073 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 17 : 15 - 2 * i);
4076 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
4077 emit_insn (gen_altivec_vmulesb (even, operands[1], operands[2]));
4078 emit_insn (gen_altivec_vmulosb (odd, operands[1], operands[2]));
4079 emit_insn (gen_altivec_vperm_v8hiv16qi (operands[0], even, odd, mask));
;; Expand vpermxor; the selector is complemented on little endian to
;; convert LE byte indices to the BE numbering the instruction uses.
4083 (define_expand "altivec_vpermxor"
4084 [(use (match_operand:V16QI 0 "register_operand"))
4085 (use (match_operand:V16QI 1 "register_operand"))
4086 (use (match_operand:V16QI 2 "register_operand"))
4087 (use (match_operand:V16QI 3 "register_operand"))]
4090 if (!BYTES_BIG_ENDIAN)
4092 /* vpermxor indexes the bytes using Big Endian numbering. If LE,
4093 change indexing in operand[3] to BE index. */
4094 rtx be_index = gen_reg_rtx (V16QImode);
4096 emit_insn (gen_one_cmplv16qi2 (be_index, operands[3]));
4097 emit_insn (gen_crypto_vpermxor_v16qi (operands[0], operands[1],
4098 operands[2], be_index));
4101 emit_insn (gen_crypto_vpermxor_v16qi (operands[0], operands[1],
4102 operands[2], operands[3]));
;; V4SF negate: build the sign-bit mask (all-ones shifted left 31) and
;; flip the sign bits with xor.
4106 (define_expand "altivec_negv4sf2"
4107 [(use (match_operand:V4SF 0 "register_operand"))
4108 (use (match_operand:V4SF 1 "register_operand"))]
4113 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
4114 neg0 = gen_reg_rtx (V4SImode);
4115 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
4116 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
4119 emit_insn (gen_xorv4sf3 (operands[0],
4120 gen_lowpart (V4SFmode, neg0), operands[1]));
4125 ;; Vector reverse elements for V16QI V8HI V4SI V4SF
;; With ISA 3.0 use the xxbr* byte-reverse instructions (reversing the
;; whole quadword, then the element-internal bytes); otherwise fall
;; back to vperm with a computed reversal mask.
4126 (define_expand "altivec_vreve<mode>2"
4127 [(set (match_operand:VEC_K 0 "register_operand" "=v")
4128 (unspec:VEC_K [(match_operand:VEC_K 1 "register_operand" "v")]
4132 if (TARGET_P9_VECTOR)
4134 if (<MODE>mode == V16QImode)
4135 emit_insn (gen_p9_xxbrq_v16qi (operands[0], operands[1]));
4136 else if (<MODE>mode == V8HImode)
4138 rtx subreg1 = simplify_gen_subreg (V1TImode, operands[1],
4140 rtx temp = gen_reg_rtx (V1TImode);
4141 emit_insn (gen_p9_xxbrq_v1ti (temp, subreg1));
4142 rtx subreg2 = simplify_gen_subreg (<MODE>mode, temp,
4144 emit_insn (gen_p9_xxbrh_v8hi (operands[0], subreg2));
4146 else /* V4SI and V4SF. */
4148 rtx subreg1 = simplify_gen_subreg (V1TImode, operands[1],
4150 rtx temp = gen_reg_rtx (V1TImode);
4151 emit_insn (gen_p9_xxbrq_v1ti (temp, subreg1));
4152 rtx subreg2 = simplify_gen_subreg (<MODE>mode, temp,
4154 if (<MODE>mode == V4SImode)
4155 emit_insn (gen_p9_xxbrw_v4si (operands[0], subreg2));
4157 emit_insn (gen_p9_xxbrw_v4sf (operands[0], subreg2));
4162 int i, j, size, num_elements;
4163 rtvec v = rtvec_alloc (16);
4164 rtx mask = gen_reg_rtx (V16QImode);
4166 size = GET_MODE_UNIT_SIZE (<MODE>mode);
4167 num_elements = GET_MODE_NUNITS (<MODE>mode);
4169 for (j = 0; j < num_elements; j++)
4170 for (i = 0; i < size; i++)
4171 RTVEC_ELT (v, i + j * size)
4172 = GEN_INT (i + (num_elements - 1 - j) * size);
4174 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
4175 emit_insn (gen_altivec_vperm_<mode> (operands[0], operands[1],
4176 operands[1], mask));
4180 ;; Vector reverse elements for V2DI V2DF
;; Two-element vectors: reversing is just a doubleword swap.
4181 (define_expand "altivec_vreve<mode>2"
4182 [(set (match_operand:VEC_64 0 "register_operand" "=v")
4183 (unspec:VEC_64 [(match_operand:VEC_64 1 "register_operand" "v")]
4187 emit_insn (gen_xxswapd_<mode> (operands[0], operands[1]));
4191 ;; Vector SIMD PEM v2.06c defines LVLX, LVLXL, LVRX, LVRXL,
4192 ;; STVLX, STVLXL, STVRX, STVRXL are available only on Cell.
4193 (define_insn "altivec_lvlx"
4194 [(set (match_operand:V16QI 0 "register_operand" "=v")
4195 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
4197 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4199 [(set_attr "type" "vecload")])
4201 (define_insn "altivec_lvlxl"
4202 [(set (match_operand:V16QI 0 "register_operand" "=v")
4203 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
4205 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4207 [(set_attr "type" "vecload")])
4209 (define_insn "altivec_lvrx"
4210 [(set (match_operand:V16QI 0 "register_operand" "=v")
4211 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
4213 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4215 [(set_attr "type" "vecload")])
4217 (define_insn "altivec_lvrxl"
4218 [(set (match_operand:V16QI 0 "register_operand" "=v")
4219 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
4221 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4223 [(set_attr "type" "vecload")])
;; Cell-only unaligned vector stores (left/right, with/without LRU
;; hint); see the comment above the matching lvlx load patterns.
4225 (define_insn "altivec_stvlx"
4227 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
4228 (match_operand:V16QI 1 "register_operand" "v"))
4229 (unspec [(const_int 0)] UNSPEC_STVLX)])]
4230 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4232 [(set_attr "type" "vecstore")])
4234 (define_insn "altivec_stvlxl"
4236 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
4237 (match_operand:V16QI 1 "register_operand" "v"))
4238 (unspec [(const_int 0)] UNSPEC_STVLXL)])]
4239 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4241 [(set_attr "type" "vecstore")])
4243 (define_insn "altivec_stvrx"
4245 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
4246 (match_operand:V16QI 1 "register_operand" "v"))
4247 (unspec [(const_int 0)] UNSPEC_STVRX)])]
4248 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4250 [(set_attr "type" "vecstore")])
4252 (define_insn "altivec_stvrxl"
4254 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
4255 (match_operand:V16QI 1 "register_operand" "v"))
4256 (unspec [(const_int 0)] UNSPEC_STVRXL)])]
4257 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4259 [(set_attr "type" "vecstore")])
;; Unpack-and-convert: sign/zero extend a V8HI half to V4SI, then
;; convert to V4SF with vcfsx/vcfux.
4261 (define_expand "vec_unpacks_float_hi_v8hi"
4262 [(set (match_operand:V4SF 0 "register_operand")
4263 (unspec:V4SF [(match_operand:V8HI 1 "register_operand")]
4264 UNSPEC_VUPKHS_V4SF))]
4267 rtx tmp = gen_reg_rtx (V4SImode);
4269 emit_insn (gen_vec_unpacks_hi_v8hi (tmp, operands[1]));
4270 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
4274 (define_expand "vec_unpacks_float_lo_v8hi"
4275 [(set (match_operand:V4SF 0 "register_operand")
4276 (unspec:V4SF [(match_operand:V8HI 1 "register_operand")]
4277 UNSPEC_VUPKLS_V4SF))]
4280 rtx tmp = gen_reg_rtx (V4SImode);
4282 emit_insn (gen_vec_unpacks_lo_v8hi (tmp, operands[1]));
4283 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
4287 (define_expand "vec_unpacku_float_hi_v8hi"
4288 [(set (match_operand:V4SF 0 "register_operand")
4289 (unspec:V4SF [(match_operand:V8HI 1 "register_operand")]
4290 UNSPEC_VUPKHU_V4SF))]
4293 rtx tmp = gen_reg_rtx (V4SImode);
4295 emit_insn (gen_vec_unpacku_hi_v8hi (tmp, operands[1]));
4296 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
4300 (define_expand "vec_unpacku_float_lo_v8hi"
4301 [(set (match_operand:V4SF 0 "register_operand")
4302 (unspec:V4SF [(match_operand:V8HI 1 "register_operand")]
4303 UNSPEC_VUPKLU_V4SF))]
4306 rtx tmp = gen_reg_rtx (V4SImode);
4308 emit_insn (gen_vec_unpacku_lo_v8hi (tmp, operands[1]));
4309 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
4314 ;; Power8/power9 vector instructions encoded as Altivec instructions
4316 ;; Vector count leading zeros
4317 (define_insn "*p8v_clz<mode>2"
4318 [(set (match_operand:VI2 0 "register_operand" "=v")
4319 (clz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4322 [(set_attr "type" "vecsimple")])
4324 ;; Vector absolute difference unsigned
4325 (define_insn "uabd<mode>3"
4326 [(set (match_operand:VI 0 "register_operand" "=v")
4329 (match_operand:VI 1 "register_operand" "v")
4330 (match_operand:VI 2 "register_operand" "v"))
4335 "vabsdu<wd> %0,%1,%2"
4336 [(set_attr "type" "vecsimple")])
4338 ;; Vector count trailing zeros
4339 (define_insn "*p9v_ctz<mode>2"
4340 [(set (match_operand:VI2 0 "register_operand" "=v")
4341 (ctz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4344 [(set_attr "type" "vecsimple")])
4346 ;; Vector population count
4347 (define_insn "*p8v_popcount<mode>2"
4348 [(set (match_operand:VI2 0 "register_operand" "=v")
4349 (popcount:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4352 [(set_attr "type" "vecsimple")])
;; Vector parity of bytes within each element.
4355 (define_insn "rs6000_vprtyb<mode>2"
4356 [(set (match_operand:VEC_IP 0 "register_operand" "=v")
4358 [(match_operand:VEC_IP 1 "register_operand" "v")]
4362 [(set_attr "type" "vecsimple")])
4364 ;; Vector Gather Bits by Bytes by Doubleword
4365 (define_insn "p8v_vgbbd"
4366 [(set (match_operand:V16QI 0 "register_operand" "=v")
4367 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
4371 [(set_attr "type" "vecsimple")])
4374 ;; 128-bit binary integer arithmetic
4375 ;; We have a special container type (V1TImode) to allow operations using the
4376 ;; ISA 2.07 128-bit binary support to target the VMX/altivec registers without
4377 ;; having to worry about the register allocator deciding GPRs are better.
;; Quadword add/subtract (modulo) and their carry-producing forms.
4379 (define_insn "altivec_vadduqm"
4380 [(set (match_operand:V1TI 0 "register_operand" "=v")
4381 (plus:V1TI (match_operand:V1TI 1 "register_operand" "v")
4382 (match_operand:V1TI 2 "register_operand" "v")))]
4385 [(set_attr "type" "vecsimple")])
4387 (define_insn "altivec_vaddcuq"
4388 [(set (match_operand:V1TI 0 "register_operand" "=v")
4389 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4390 (match_operand:V1TI 2 "register_operand" "v")]
4394 [(set_attr "type" "vecsimple")])
4396 (define_insn "altivec_vsubuqm"
4397 [(set (match_operand:V1TI 0 "register_operand" "=v")
4398 (minus:V1TI (match_operand:V1TI 1 "register_operand" "v")
4399 (match_operand:V1TI 2 "register_operand" "v")))]
4402 [(set_attr "type" "vecsimple")])
4404 (define_insn "altivec_vsubcuq"
4405 [(set (match_operand:V1TI 0 "register_operand" "=v")
4406 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4407 (match_operand:V1TI 2 "register_operand" "v")]
4411 [(set_attr "type" "vecsimple")])
;; Quadword add/subtract extended forms: three-operand variants that
;; consume a carry-in (operand 3) and/or produce a carry-out.
4413 (define_insn "altivec_vaddeuqm"
4414 [(set (match_operand:V1TI 0 "register_operand" "=v")
4415 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4416 (match_operand:V1TI 2 "register_operand" "v")
4417 (match_operand:V1TI 3 "register_operand" "v")]
4420 "vaddeuqm %0,%1,%2,%3"
4421 [(set_attr "type" "vecsimple")])
4423 (define_insn "altivec_vaddecuq"
4424 [(set (match_operand:V1TI 0 "register_operand" "=v")
4425 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4426 (match_operand:V1TI 2 "register_operand" "v")
4427 (match_operand:V1TI 3 "register_operand" "v")]
4430 "vaddecuq %0,%1,%2,%3"
4431 [(set_attr "type" "vecsimple")])
4433 (define_insn "altivec_vsubeuqm"
4434 [(set (match_operand:V1TI 0 "register_operand" "=v")
4435 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4436 (match_operand:V1TI 2 "register_operand" "v")
4437 (match_operand:V1TI 3 "register_operand" "v")]
4440 "vsubeuqm %0,%1,%2,%3"
4441 [(set_attr "type" "vecsimple")])
4443 (define_insn "altivec_vsubecuq"
4444 [(set (match_operand:V1TI 0 "register_operand" "=v")
4445 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4446 (match_operand:V1TI 2 "register_operand" "v")
4447 (match_operand:V1TI 3 "register_operand" "v")]
4450 "vsubecuq %0,%1,%2,%3"
4451 [(set_attr "type" "vecsimple")])
4453 ;; We use V2DI as the output type to simplify converting the permute
4454 ;; bits into an integer
;; Vector bit permute quadword: gathers bits of op1 selected by the index
;; bytes in op2 into the V2DI destination.
;; NOTE(review): the unspec name, condition and template lines (orig.
;; 4459-4461) appear to be missing from this extract -- confirm upstream.
4455 (define_insn "altivec_vbpermq"
4456 [(set (match_operand:V2DI 0 "register_operand" "=v")
4457 (unspec:V2DI [(match_operand:V16QI 1 "register_operand" "v")
4458 (match_operand:V16QI 2 "register_operand" "v")]
4462 [(set_attr "type" "vecperm")])
4464 ; One of the vector API interfaces requires returning vector unsigned char.
;; Same operation as altivec_vbpermq above, but with a V16QI result mode so
;; the builtin can return vector unsigned char directly.
;; NOTE(review): the unspec name, condition and template lines (orig.
;; 4469-4471) appear to be missing from this extract -- confirm upstream.
4465 (define_insn "altivec_vbpermq2"
4466 [(set (match_operand:V16QI 0 "register_operand" "=v")
4467 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
4468 (match_operand:V16QI 2 "register_operand" "v")]
4472 [(set_attr "type" "vecperm")])
;; Vector bit permute doubleword: per-doubleword variant; op1 supplies the
;; source bits (V2DI), op2 the byte indices (V16QI).
;; NOTE(review): the unspec name, condition and template lines (orig.
;; 4478-4480) appear to be missing from this extract -- confirm upstream.
4474 (define_insn "altivec_vbpermd"
4475 [(set (match_operand:V2DI 0 "register_operand" "=v")
4476 (unspec:V2DI [(match_operand:V2DI 1 "register_operand" "v")
4477 (match_operand:V16QI 2 "register_operand" "v")]
4481 [(set_attr "type" "vecsimple")])
4483 ;; Support for SAD (sum of absolute differences).
4485 ;; Due to saturating semantics, we can't combine the sum-across
4486 ;; with the vector accumulate in vsum4ubs. A vadduwm is needed.
;; usadv16qi: standard-named expander implementing unsigned SAD on byte
;; vectors. Sequence: (1) per-byte absolute difference of op1/op2,
;; (2) splat a zero vector, (3) vsum4ubs partial sums of the byte
;; differences into word lanes, (4) plain V4SI add of the partial sums
;; into the accumulator (op3), giving the result in op0.
4487 (define_expand "usadv16qi"
4488 [(use (match_operand:V4SI 0 "register_operand"))
4489 (use (match_operand:V16QI 1 "register_operand"))
4490 (use (match_operand:V16QI 2 "register_operand"))
4491 (use (match_operand:V4SI 3 "register_operand"))]
4494 rtx absd = gen_reg_rtx (V16QImode);
4495 rtx zero = gen_reg_rtx (V4SImode);
4496 rtx psum = gen_reg_rtx (V4SImode);
4498 emit_insn (gen_uabdv16qi3 (absd, operands[1], operands[2]));
4499 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
4500 emit_insn (gen_altivec_vsum4ubs (psum, absd, zero));
4501 emit_insn (gen_addv4si3 (operands[0], psum, operands[3]));
4505 ;; Since vsum4shs is saturating and further performs signed
4506 ;; arithmetic, we can't combine the sum-across with the vector
4507 ;; accumulate in vsum4shs. A vadduwm is needed.
;; usadv8hi: halfword counterpart of usadv16qi. Same four-step scheme:
;; unsigned absolute difference (uabdv8hi3), zero splat, vsum4shs
;; partial sums, then a modulo V4SI add into the accumulator (op3).
4508 (define_expand "usadv8hi"
4509 [(use (match_operand:V4SI 0 "register_operand"))
4510 (use (match_operand:V8HI 1 "register_operand"))
4511 (use (match_operand:V8HI 2 "register_operand"))
4512 (use (match_operand:V4SI 3 "register_operand"))]
4515 rtx absd = gen_reg_rtx (V8HImode);
4516 rtx zero = gen_reg_rtx (V4SImode);
4517 rtx psum = gen_reg_rtx (V4SImode);
4519 emit_insn (gen_uabdv8hi3 (absd, operands[1], operands[2]));
4520 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
4521 emit_insn (gen_altivec_vsum4shs (psum, absd, zero));
4522 emit_insn (gen_addv4si3 (operands[0], psum, operands[3]));
4526 ;; Decimal Integer operations
;; Iterators/attributes shared by the BCD patterns below:
;; - UNSPEC_BCD_ADD_SUB iterates the add/sub unspec pair;
;; - bcd_add_sub maps each unspec to the "add"/"sub" mnemonic fragment;
;; - BCD_TEST enumerates the CR6 conditions a BCD result can be tested for
;;   (unordered is used for invalid-digit/overflow detection);
;; - VBCD covers the two modes BCD values live in (V1TI and V16QI).
4527 (define_int_iterator UNSPEC_BCD_ADD_SUB [UNSPEC_BCDADD UNSPEC_BCDSUB])
4529 (define_int_attr bcd_add_sub [(UNSPEC_BCDADD "add")
4530 (UNSPEC_BCDSUB "sub")])
4532 (define_code_iterator BCD_TEST [eq lt le gt ge unordered])
4533 (define_mode_iterator VBCD [V1TI V16QI])
;; bcdadd./bcdsub. producing the BCD value; the dot-form instruction also
;; sets CR6, modeled here as a clobber since this pattern only wants the
;; data result. Operand 3 is the 1-bit "PS" (preferred sign) immediate.
4535 (define_insn "bcd<bcd_add_sub>_<mode>"
4536 [(set (match_operand:VBCD 0 "register_operand" "=v")
4537 (unspec:VBCD [(match_operand:VBCD 1 "register_operand" "v")
4538 (match_operand:VBCD 2 "register_operand" "v")
4539 (match_operand:QI 3 "const_0_to_1_operand" "n")]
4540 UNSPEC_BCD_ADD_SUB))
4541 (clobber (reg:CCFP CR6_REGNO))]
4543 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4544 [(set_attr "type" "vecsimple")])
4546 ;; Use a floating point type (V2DFmode) for the compare to set CR6 so that we
4547 ;; can use the unordered test for BCD nans and add/subtracts that overflow. An
4548 ;; UNORDERED test on an integer type (like V1TImode) is not defined. The type
4549 ;; probably should be one that can go in the VMX (Altivec) registers, so we
4550 ;; can't use DDmode or DFmode.
;; Test-only form of bcdadd./bcdsub.: the CR6 setting is the real result and
;; the BCD data value is discarded via a match_scratch clobber.
;; Operand 4 must be the zero V2DF constant used as the compare reference.
4551 (define_insn "*bcd<bcd_add_sub>_test_<mode>"
4552 [(set (reg:CCFP CR6_REGNO)
4554 (unspec:V2DF [(match_operand:VBCD 1 "register_operand" "v")
4555 (match_operand:VBCD 2 "register_operand" "v")
4556 (match_operand:QI 3 "const_0_to_1_operand" "i")]
4558 (match_operand:V2DF 4 "zero_constant" "j")))
4559 (clobber (match_scratch:VBCD 0 "=v"))]
4561 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4562 [(set_attr "type" "vecsimple")])
;; Combined form: one bcdadd./bcdsub. that produces BOTH the BCD data value
;; (first set) and the CR6 condition (second set). This is the pattern the
;; peephole2 at the end of the BCD section merges the value/test pair into.
4564 (define_insn "*bcd<bcd_add_sub>_test2_<mode>"
4565 [(set (match_operand:VBCD 0 "register_operand" "=v")
4566 (unspec:VBCD [(match_operand:VBCD 1 "register_operand" "v")
4567 (match_operand:VBCD 2 "register_operand" "v")
4568 (match_operand:QI 3 "const_0_to_1_operand" "i")]
4569 UNSPEC_BCD_ADD_SUB))
4570 (set (reg:CCFP CR6_REGNO)
4572 (unspec:V2DF [(match_dup 1)
4576 (match_operand:V2DF 4 "zero_constant" "j")))]
4578 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4579 [(set_attr "type" "vecsimple")])
;; vcfuged: vector centrifuge doubleword (bit gather/scatter by mask).
;; NOTE(review): the unspec name, condition and template lines (orig.
;; 4585-4587) appear to be missing from this extract -- confirm upstream.
4581 (define_insn "vcfuged"
4582 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4583 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4584 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4588 [(set_attr "type" "vecsimple")])
;; vclzdm: vector count leading zeros doubleword under bit mask (op2).
;; NOTE(review): the unspec name, condition and template lines (orig.
;; 4594-4596) appear to be missing from this extract -- confirm upstream.
4590 (define_insn "vclzdm"
4591 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4592 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4593 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4597 [(set_attr "type" "vecsimple")])
;; vctzdm: vector count trailing zeros doubleword under bit mask (op2).
;; NOTE(review): the unspec name, condition and template lines (orig.
;; 4603-4605) appear to be missing from this extract -- confirm upstream.
4599 (define_insn "vctzdm"
4600 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4601 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4602 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4606 [(set_attr "type" "vecsimple")])
;; vpdepd: vector parallel bits deposit doubleword (scatter op1 bits into
;; the positions selected by mask op2).
;; NOTE(review): the unspec name, condition and template lines (orig.
;; 4612-4614) appear to be missing from this extract -- confirm upstream.
4608 (define_insn "vpdepd"
4609 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4610 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4611 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4615 [(set_attr "type" "vecsimple")])
;; vpextd: vector parallel bits extract doubleword (gather op1 bits selected
;; by mask op2 into the low-order positions).
;; NOTE(review): the unspec name, condition and template lines (orig.
;; 4621-4623) appear to be missing from this extract -- confirm upstream.
4617 (define_insn "vpextd"
4618 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4619 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4620 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4624 [(set_attr "type" "vecsimple")])
;; NOTE(review): the define_insn header line (orig. 4626) is missing from
;; this extract, as are the unspec/condition/template lines (orig. 4630-4632).
;; By position and operand shape (V2DI source, 3-bit immediate, DI result in
;; a GPR) this is presumably the Power10 vgnb pattern -- confirm upstream.
4627 [(set (match_operand:DI 0 "register_operand" "=r")
4628 (unspec:DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4629 (match_operand:QI 2 "u3bit_cint_operand" "n")]
4633 [(set_attr "type" "vecsimple")])
;; vclrlb: clear leftmost bytes. On little-endian the hardware's notion of
;; "left" is mirrored, so the C output routine emits vclrrb instead to keep
;; the element-order semantics the builtin promises.
4635 (define_insn "vclrlb"
4636 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
4637 (unspec:V16QI [(match_operand:V16QI 1 "altivec_register_operand" "v")
4638 (match_operand:SI 2 "gpc_reg_operand" "r")]
4642 if (BYTES_BIG_ENDIAN)
4643 return "vclrlb %0,%1,%2";
4645 return "vclrrb %0,%1,%2";
4647 [(set_attr "type" "vecsimple")])
;; vclrrb: clear rightmost bytes; mirror image of vclrlb above -- on
;; little-endian it emits vclrlb so the builtin's element-order semantics
;; are preserved.
4649 (define_insn "vclrrb"
4650 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
4651 (unspec:V16QI [(match_operand:V16QI 1 "altivec_register_operand" "v")
4652 (match_operand:SI 2 "gpc_reg_operand" "r")]
4656 if (BYTES_BIG_ENDIAN)
4657 return "vclrrb %0,%1,%2";
4659 return "vclrlb %0,%1,%2";
4661 [(set_attr "type" "vecsimple")])
;; Expander for the BCD compare builtins: emit the test-only bcdadd./bcdsub.
;; (which sets CR6, data result discarded into scratch operand 5), then
;; materialize the requested BCD_TEST condition of CR6 into the SI result.
;; Operand 4 is filled in here with the zero V2DF compare reference.
4663 (define_expand "bcd<bcd_add_sub>_<code>_<mode>"
4664 [(parallel [(set (reg:CCFP CR6_REGNO)
4666 (unspec:V2DF [(match_operand:VBCD 1 "register_operand")
4667 (match_operand:VBCD 2 "register_operand")
4668 (match_operand:QI 3 "const_0_to_1_operand")]
4671 (clobber (match_scratch:VBCD 5))])
4672 (set (match_operand:SI 0 "register_operand")
4673 (BCD_TEST:SI (reg:CCFP CR6_REGNO)
4677 operands[4] = CONST0_RTX (V2DFmode);
;; Validity check: "bcdsub. %0,%1,%1,0" subtracts a BCD value from itself,
;; which sets the CR6 "unordered"/invalid bit when op1 is not a valid BCD
;; encoding; only the CR6 result matters, so the data output is a scratch.
4680 (define_insn "*bcdinvalid_<mode>"
4681 [(set (reg:CCFP CR6_REGNO)
4683 (unspec:V2DF [(match_operand:VBCD 1 "register_operand" "v")]
4685 (match_operand:V2DF 2 "zero_constant" "j")))
4686 (clobber (match_scratch:VBCD 0 "=v"))]
4688 "bcdsub. %0,%1,%1,0"
4689 [(set_attr "type" "vecsimple")])
;; Expander pairing the *bcdinvalid insn above with an unordered test of
;; CR6, yielding an SI boolean (nonzero when op1 is not valid BCD).
;; Operand 2 is filled in here with the zero V2DF compare reference.
4691 (define_expand "bcdinvalid_<mode>"
4692 [(parallel [(set (reg:CCFP CR6_REGNO)
4694 (unspec:V2DF [(match_operand:VBCD 1 "register_operand")]
4697 (clobber (match_scratch:VBCD 3))])
4698 (set (match_operand:SI 0 "register_operand")
4699 (unordered:SI (reg:CCFP CR6_REGNO)
4703 operands[2] = CONST0_RTX (V2DFmode);
;; BCD shift: shifts the BCD digits of op2 by the (signed) digit count in
;; op1; op3 is the 1-bit PS immediate. The dot-form instruction also sets
;; CR6, modeled as a clobber. Used below to implement multiply/divide by 10.
4706 (define_insn "bcdshift_v16qi"
4707 [(set (match_operand:V16QI 0 "register_operand" "=v")
4708 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
4709 (match_operand:V16QI 2 "register_operand" "v")
4710 (match_operand:QI 3 "const_0_to_1_operand" "n")]
4712 (clobber (reg:CCFP CR6_REGNO))]
4715 [(set_attr "type" "vecsimple")])
;; Multiply a BCD value by 10: splat +1 into a vector and use it as the
;; shift count for bcdshift (shift digits left by one = *10).
4717 (define_expand "bcdmul10_v16qi"
4718 [(set (match_operand:V16QI 0 "register_operand")
4719 (unspec:V16QI [(match_operand:V16QI 1 "register_operand")]
4721 (clobber (reg:CCFP CR6_REGNO))]
4724 rtx one = gen_reg_rtx (V16QImode);
4726 emit_insn (gen_altivec_vspltisb (one, const1_rtx));
4727 emit_insn (gen_bcdshift_v16qi (operands[0], one, operands[1], const0_rtx));
;; Divide a BCD value by 10: splat -1 (constm1_rtx) as the bcdshift count,
;; shifting the digits right by one position (= /10).
4732 (define_expand "bcddiv10_v16qi"
4733 [(set (match_operand:V16QI 0 "register_operand")
4734 (unspec:V16QI [(match_operand:V16QI 1 "register_operand")]
4736 (clobber (reg:CCFP CR6_REGNO))]
4739 rtx one = gen_reg_rtx (V16QImode);
4741 emit_insn (gen_altivec_vspltisb (one, constm1_rtx));
4742 emit_insn (gen_bcdshift_v16qi (operands[0], one, operands[1], const0_rtx));
4748 ;; Peephole2 pattern to combine a bcdadd/bcdsub that calculates the value and
4749 ;; the bcdadd/bcdsub that tests the value. The combiner won't work since
4750 ;; CR6 is a hard coded register. Unfortunately, all of the Altivec predicate
4751 ;; support is hard coded to use the fixed register CR6 instead of creating
4752 ;; a register class for CR6.
4755 [(parallel [(set (match_operand:V1TI 0 "register_operand")
4756 (unspec:V1TI [(match_operand:V1TI 1 "register_operand")
4757 (match_operand:V1TI 2 "register_operand")
4758 (match_operand:QI 3 "const_0_to_1_operand")]
4759 UNSPEC_BCD_ADD_SUB))
4760 (clobber (reg:CCFP CR6_REGNO))])
4761 (parallel [(set (reg:CCFP CR6_REGNO)
4763 (unspec:V2DF [(match_dup 1)
4767 (match_operand:V2DF 4 "zero_constant")))
4768 (clobber (match_operand:V1TI 5 "register_operand"))])]
4770 [(parallel [(set (match_dup 0)
4771 (unspec:V1TI [(match_dup 1)
4774 UNSPEC_BCD_ADD_SUB))
4775 (set (reg:CCFP CR6_REGNO)
4777 (unspec:V2DF [(match_dup 1)