2 ;; Copyright (C) 2002-2017 Free Software Foundation, Inc.
3 ;; Contributed by Aldy Hernandez (aldy@quesejoda.com)
5 ;; This file is part of GCC.
7 ;; GCC is free software; you can redistribute it and/or modify it
8 ;; under the terms of the GNU General Public License as published
9 ;; by the Free Software Foundation; either version 3, or (at your
10 ;; option) any later version.
12 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
13 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 ;; License for more details.
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
21 (define_c_enum "unspec"
;; NOTE(review): most enum entries are elided in this view (embedded line
;; numbers jump).  The visible names are unspec markers used by the
;; saturating/modulo pack, sign unpack, and sum-across patterns below.
   44  UNSPEC_VPACK_SIGN_SIGN_SAT
   45  UNSPEC_VPACK_SIGN_UNS_SAT
   46  UNSPEC_VPACK_UNS_UNS_SAT
   47  UNSPEC_VPACK_UNS_UNS_MOD
   48  UNSPEC_VPACK_UNS_UNS_MOD_DIRECT
   71  UNSPEC_VUNPACK_HI_SIGN
   72  UNSPEC_VUNPACK_LO_SIGN
   73  UNSPEC_VUNPACK_HI_SIGN_DIRECT
   74  UNSPEC_VUNPACK_LO_SIGN_DIRECT
  145  UNSPEC_VSUMSWS_DIRECT
;; Volatile unspecs: operations with side effects (e.g. VRSAVE updates).
164 (define_c_enum "unspecv"
172 ;; Like VI, defined in vector.md, but add ISA 2.07 integer vector ops
173 (define_mode_iterator VI2 [V4SI V8HI V16QI V2DI])
174 ;; Short vec int modes
175 (define_mode_iterator VIshort [V8HI V16QI])
176 ;; Longer vec int modes for rotate/mask ops
177 (define_mode_iterator VIlong [V2DI V4SI])
;; Vector float mode handled by AltiVec proper.
179 (define_mode_iterator VF [V4SF])
180 ;; Vec modes, pity mode iterators are not composable
181 (define_mode_iterator V [V4SI V8HI V16QI V4SF])
182 ;; Vec modes for move/logical/permute ops, include vector types for move not
183 ;; otherwise handled by altivec (v2df, v2di, ti)
184 (define_mode_iterator VM [V4SI
;; NOTE(review): intermediate VM entries elided in this view.  KF/TF are
;; included only when IEEE-128 floats live in vector registers.
192                            (KF "FLOAT128_VECTOR_P (KFmode)")
193                            (TF "FLOAT128_VECTOR_P (TFmode)")])
195 ;; Like VM, except don't do TImode
196 (define_mode_iterator VM2 [V4SI
203                             (KF "FLOAT128_VECTOR_P (KFmode)")
204                             (TF "FLOAT128_VECTOR_P (TFmode)")])
206 ;; Specific iterator for parity which does not have a byte/half-word form, but
207 ;; does have a quad word form
208 (define_mode_iterator VParity [V4SI
211                                 (TI "TARGET_VSX_TIMODE")])
;; Per-mode instruction-suffix letter (d/w/h/b) used in asm templates.
213 (define_mode_attr VI_char [(V2DI "d") (V4SI "w") (V8HI "h") (V16QI "b")])
;; Scalar element mode corresponding to each vector mode.
214 (define_mode_attr VI_scalar [(V2DI "DI") (V4SI "SI") (V8HI "HI") (V16QI "QI")])
;; Per-mode enable condition; V2DI integer ops require power8 vector support.
215 (define_mode_attr VI_unit [(V16QI "VECTOR_UNIT_ALTIVEC_P (V16QImode)")
216                            (V8HI "VECTOR_UNIT_ALTIVEC_P (V8HImode)")
217                            (V4SI "VECTOR_UNIT_ALTIVEC_P (V4SImode)")
218                            (V2DI "VECTOR_UNIT_P8_VECTOR_P (V2DImode)")
219                            (V1TI "VECTOR_UNIT_ALTIVEC_P (V1TImode)")])
221 ;; Vector pack/unpack
222 (define_mode_iterator VP [V2DI V4SI V8HI])
223 (define_mode_attr VP_small [(V2DI "V4SI") (V4SI "V8HI") (V8HI "V16QI")])
224 (define_mode_attr VP_small_lc [(V2DI "v4si") (V4SI "v8hi") (V8HI "v16qi")])
225 (define_mode_attr VU_char [(V2DI "w") (V4SI "h") (V8HI "b")])
;; Modes that have a vector negate expansion.
228 (define_mode_iterator VNEG [V4SI V2DI])
230 ;; Vector move instructions.
;; Alternatives: AltiVec store (stvx) / load (lvx) / register copy (vor),
;; GPR store/load/move, zero via vxor, and easy vector constants.
;; NOTE(review): cases 3-5 of the switch appear elided in this view.
231 (define_insn "*altivec_mov<mode>"
232   [(set (match_operand:VM2 0 "nonimmediate_operand" "=Z,v,v,?Y,?*r,?*r,v,v,?*r")
233 	(match_operand:VM2 1 "input_operand" "v,Z,v,*r,Y,*r,j,W,W"))]
234   "VECTOR_MEM_ALTIVEC_P (<MODE>mode)
235    && (register_operand (operands[0], <MODE>mode)
236        || register_operand (operands[1], <MODE>mode))"
238   switch (which_alternative)
240     case 0: return "stvx %1,%y0";
241     case 1: return "lvx %0,%y1";
242     case 2: return "vor %0,%1,%1";
246     case 6: return "vxor %0,%0,%0";
247     case 7: return output_vec_const_move (operands);
249     default: gcc_unreachable ();
252   [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*,*")
253    (set_attr "length" "4,4,4,20,20,20,4,8,32")])
255 ;; Unlike other altivec moves, allow the GPRs, since a normal use of TImode
256 ;; is for unions.  However for plain data movement, slightly favor the vector
;; TImode variant of the pattern above (one fewer constant alternative).
258 (define_insn "*altivec_movti"
259   [(set (match_operand:TI 0 "nonimmediate_operand" "=Z,v,v,?Y,?r,?r,v,v")
260 	(match_operand:TI 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
261   "VECTOR_MEM_ALTIVEC_P (TImode)
262    && (register_operand (operands[0], TImode)
263        || register_operand (operands[1], TImode))"
265   switch (which_alternative)
267     case 0: return "stvx %1,%y0";
268     case 1: return "lvx %0,%y1";
269     case 2: return "vor %0,%1,%1";
273     case 6: return "vxor %0,%0,%0";
274     case 7: return output_vec_const_move (operands);
275     default: gcc_unreachable ();
278   [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*")])
280 ;; Load up a vector with the most significant bit set by loading up -1 and
281 ;; doing a shift left
;; NOTE(review): the "(define_split" header line itself is elided here.
283   [(set (match_operand:VM 0 "altivec_register_operand" "")
284 	(match_operand:VM 1 "easy_vector_constant_msb" ""))]
285   "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
288   rtx dest = operands[0];
289   machine_mode mode = GET_MODE (operands[0]);
;; V4SF is materialized through its V4SI lowpart view.
293   if (mode == V4SFmode)
296       dest = gen_lowpart (V4SImode, dest);
;; Build an all-ones vector, then shift each element left by itself;
;; presumably the vector shift masks the count to element-size-1 so only
;; the sign bit survives -- TODO confirm against vspltis/vsl semantics.
299   num_elements = GET_MODE_NUNITS (mode);
300   v = rtvec_alloc (num_elements);
301   for (i = 0; i < num_elements; i++)
302     RTVEC_ELT (v, i) = constm1_rtx;
304   emit_insn (gen_vec_initv4sisi (dest, gen_rtx_PARALLEL (mode, v)));
305   emit_insn (gen_rtx_SET (dest, gen_rtx_ASHIFT (mode, dest, dest)));
;; Split a constant of the form x+x into a splat of x followed by an
;; add of the result to itself.
310   [(set (match_operand:VM 0 "altivec_register_operand" "")
311 	(match_operand:VM 1 "easy_vector_constant_add_self" ""))]
312   "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
313   [(set (match_dup 0) (match_dup 3))
314    (set (match_dup 0) (match_dup 4))]
316   rtx dup = gen_easy_altivec_constant (operands[1]);
318   machine_mode op_mode = <MODE>mode;
320   /* Divide the operand of the resulting VEC_DUPLICATE, and use
321      simplify_rtx to make a CONST_VECTOR.  */
322   XEXP (dup, 0) = simplify_const_binary_operation (ASHIFTRT, QImode,
323 						   XEXP (dup, 0), const1_rtx);
324   const_vec = simplify_rtx (dup);
326   if (op_mode == V4SFmode)
329       operands[0] = gen_lowpart (op_mode, operands[0]);
331   if (GET_MODE (const_vec) == op_mode)
332     operands[3] = const_vec;
334     operands[3] = gen_lowpart (op_mode, const_vec);
335   operands[4] = gen_rtx_PLUS (op_mode, operands[0], operands[0]);
;; Split constants loadable as a splat plus a shift, while pseudos can
;; still be created.
339   [(set (match_operand:VM 0 "altivec_register_operand" "")
340 	(match_operand:VM 1 "easy_vector_constant_vsldoi" ""))]
341   "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && can_create_pseudo_p ()"
342   [(set (match_dup 2) (match_dup 3))
343    (set (match_dup 4) (match_dup 5))
345 	(unspec:VM [(match_dup 2)
350   rtx op1 = operands[1];
351   int elt = (BYTES_BIG_ENDIAN) ? 0 : GET_MODE_NUNITS (<MODE>mode) - 1;
352   HOST_WIDE_INT val = const_vector_elt_as_int (op1, elt);
353   rtx rtx_val = GEN_INT (val);
354   int shift = vspltis_shifted (op1);
;; NOTE(review): from the two arms below, a negative shift selects an
;; all-ones fill vector and a positive shift an all-zeros one -- confirm
;; against vspltis_shifted's contract.
356   gcc_assert (shift != 0);
357   operands[2] = gen_reg_rtx (<MODE>mode);
358   operands[3] = gen_const_vec_duplicate (<MODE>mode, rtx_val);
359   operands[4] = gen_reg_rtx (<MODE>mode);
363       operands[5] = CONSTM1_RTX (<MODE>mode);
364       operands[6] = GEN_INT (-shift);
368       operands[5] = CONST0_RTX (<MODE>mode);
369       operands[6] = GEN_INT (shift);
;; Read the VRSAVE special-purpose register into a GPR.  The condition
;; choosing between the raw SPR form and the mnemonic is elided here.
373 (define_insn "get_vrsave_internal"
374   [(set (match_operand:SI 0 "register_operand" "=r")
375 	(unspec:SI [(reg:SI VRSAVE_REGNO)] UNSPEC_GET_VRSAVE))]
379     return "mfspr %0,256";
381     return "mfvrsave %0";
383   [(set_attr "type" "*")])
;; Write a GPR into VRSAVE; volatile, since VRSAVE controls which vector
;; registers are saved across calls.
385 (define_insn "*set_vrsave_internal"
386   [(match_parallel 0 "vrsave_operation"
387 		   [(set (reg:SI VRSAVE_REGNO)
388 			 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r")
389 					     (reg:SI VRSAVE_REGNO)] UNSPECV_SET_VRSAVE))])]
393     return "mtspr 256,%1";
395     return "mtvrsave %1";
397   [(set_attr "type" "*")])
;; Darwin 32-bit out-of-line prologue/epilogue helpers: a single call
;; saves/restores the world of registers.
399 (define_insn "*save_world"
400  [(match_parallel 0 "save_world_operation"
401                   [(clobber (reg:SI LR_REGNO))
402                    (use (match_operand:SI 1 "call_operand" "s"))])]
403  "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
405   [(set_attr "type" "branch")
406    (set_attr "length" "4")])
408 (define_insn "*restore_world"
409  [(match_parallel 0 "restore_world_operation"
411                    (use (reg:SI LR_REGNO))
412                    (use (match_operand:SI 1 "call_operand" "s"))
413                    (clobber (match_operand:SI 2 "gpc_reg_operand" "=r"))])]
414  "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
417 ;; The save_vregs and restore_vregs patterns don't use memory_operand
418 ;; because (plus (reg) (const_int)) is not a valid vector address.
419 ;; This way is more compact than describing exactly what happens in
420 ;; the out-of-line functions, ie. loading the constant into r11/r12
421 ;; then using indexed addressing, and requires less editing of rtl
422 ;; to describe the operation to dwarf2out_frame_debug_expr.
423 (define_insn "*save_vregs_<mode>_r11"
424   [(match_parallel 0 "any_parallel_operand"
425 		   [(clobber (reg:P LR_REGNO))
426 		    (use (match_operand:P 1 "symbol_ref_operand" "s"))
429 		    (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
430 					   (match_operand:P 3 "short_cint_operand" "I")))
431 			 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
434   [(set_attr "type" "branch")
435    (set_attr "length" "4")])
;; Same as the r11 variant above, but the helper uses r12.
437 (define_insn "*save_vregs_<mode>_r12"
438   [(match_parallel 0 "any_parallel_operand"
439 		   [(clobber (reg:P LR_REGNO))
440 		    (use (match_operand:P 1 "symbol_ref_operand" "s"))
443 		    (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
444 					   (match_operand:P 3 "short_cint_operand" "I")))
445 			 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
448   [(set_attr "type" "branch")
449    (set_attr "length" "4")])
;; Restore counterparts: load vector registers back from the stack slots.
451 (define_insn "*restore_vregs_<mode>_r11"
452   [(match_parallel 0 "any_parallel_operand"
453 		   [(clobber (reg:P LR_REGNO))
454 		    (use (match_operand:P 1 "symbol_ref_operand" "s"))
457 		    (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
458 			 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
459 					   (match_operand:P 4 "short_cint_operand" "I"))))])]
462   [(set_attr "type" "branch")
463    (set_attr "length" "4")])
465 (define_insn "*restore_vregs_<mode>_r12"
466   [(match_parallel 0 "any_parallel_operand"
467 		   [(clobber (reg:P LR_REGNO))
468 		    (use (match_operand:P 1 "symbol_ref_operand" "s"))
471 		    (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
472 			 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
473 					   (match_operand:P 4 "short_cint_operand" "I"))))])]
476   [(set_attr "type" "branch")
477    (set_attr "length" "4")])
479 ;; Simple binary operations.
;; Modulo (wrapping) integer vector add: vaddu{b,h,w,d}m.
482 (define_insn "add<mode>3"
483   [(set (match_operand:VI2 0 "register_operand" "=v")
484         (plus:VI2 (match_operand:VI2 1 "register_operand" "v")
485 		  (match_operand:VI2 2 "register_operand" "v")))]
487   "vaddu<VI_char>m %0,%1,%2"
488   [(set_attr "type" "vecsimple")])
;; Single-precision vector float add (asm template elided in this view).
490 (define_insn "*altivec_addv4sf3"
491   [(set (match_operand:V4SF 0 "register_operand" "=v")
492 	(plus:V4SF (match_operand:V4SF 1 "register_operand" "v")
493 		   (match_operand:V4SF 2 "register_operand" "v")))]
494   "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
496   [(set_attr "type" "vecfloat")])
;; Carry-out of an unsigned V4SI add (asm template elided in this view).
498 (define_insn "altivec_vaddcuw"
499   [(set (match_operand:V4SI 0 "register_operand" "=v")
500         (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
501                       (match_operand:V4SI 2 "register_operand" "v")]
503   "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
505   [(set_attr "type" "vecsimple")])
;; Unsigned saturating add; the second set records that VSCR's saturation
;; bit may be updated.
507 (define_insn "altivec_vaddu<VI_char>s"
508   [(set (match_operand:VI 0 "register_operand" "=v")
509         (unspec:VI [(match_operand:VI 1 "register_operand" "v")
510 		    (match_operand:VI 2 "register_operand" "v")]
512    (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
514   "vaddu<VI_char>s %0,%1,%2"
515   [(set_attr "type" "vecsimple")])
;; Signed saturating add, likewise touching VSCR.
517 (define_insn "altivec_vadds<VI_char>s"
518   [(set (match_operand:VI 0 "register_operand" "=v")
519         (unspec:VI [(match_operand:VI 1 "register_operand" "v")
520                     (match_operand:VI 2 "register_operand" "v")]
522    (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
523   "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
524   "vadds<VI_char>s %0,%1,%2"
525   [(set_attr "type" "vecsimple")])
;; Modulo (wrapping) integer vector subtract: vsubu{b,h,w,d}m.
528 (define_insn "sub<mode>3"
529   [(set (match_operand:VI2 0 "register_operand" "=v")
530         (minus:VI2 (match_operand:VI2 1 "register_operand" "v")
531 		   (match_operand:VI2 2 "register_operand" "v")))]
533   "vsubu<VI_char>m %0,%1,%2"
534   [(set_attr "type" "vecsimple")])
;; Single-precision vector float subtract (asm template elided here).
536 (define_insn "*altivec_subv4sf3"
537   [(set (match_operand:V4SF 0 "register_operand" "=v")
538         (minus:V4SF (match_operand:V4SF 1 "register_operand" "v")
539                     (match_operand:V4SF 2 "register_operand" "v")))]
540   "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
542   [(set_attr "type" "vecfloat")])
;; Borrow/carry result of an unsigned V4SI subtract (template elided).
544 (define_insn "altivec_vsubcuw"
545   [(set (match_operand:V4SI 0 "register_operand" "=v")
546         (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
547                       (match_operand:V4SI 2 "register_operand" "v")]
549   "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
551   [(set_attr "type" "vecsimple")])
;; Unsigned saturating subtract; also updates VSCR's saturation bit.
553 (define_insn "altivec_vsubu<VI_char>s"
554   [(set (match_operand:VI 0 "register_operand" "=v")
555         (unspec:VI [(match_operand:VI 1 "register_operand" "v")
556                     (match_operand:VI 2 "register_operand" "v")]
558    (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
559   "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
560   "vsubu<VI_char>s %0,%1,%2"
561   [(set_attr "type" "vecsimple")])
;; Signed saturating subtract, likewise touching VSCR.
563 (define_insn "altivec_vsubs<VI_char>s"
564   [(set (match_operand:VI 0 "register_operand" "=v")
565         (unspec:VI [(match_operand:VI 1 "register_operand" "v")
566                     (match_operand:VI 2 "register_operand" "v")]
568    (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
569   "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
570   "vsubs<VI_char>s %0,%1,%2"
571   [(set_attr "type" "vecsimple")])
;; Unsigned vector average: vavgu{b,h,w}.
574 (define_insn "altivec_vavgu<VI_char>"
575   [(set (match_operand:VI 0 "register_operand" "=v")
576         (unspec:VI [(match_operand:VI 1 "register_operand" "v")
577                     (match_operand:VI 2 "register_operand" "v")]
580   "vavgu<VI_char> %0,%1,%2"
581   [(set_attr "type" "vecsimple")])
;; Signed vector average: vavgs{b,h,w}.
583 (define_insn "altivec_vavgs<VI_char>"
584   [(set (match_operand:VI 0 "register_operand" "=v")
585         (unspec:VI [(match_operand:VI 1 "register_operand" "v")
586                     (match_operand:VI 2 "register_operand" "v")]
588   "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
589   "vavgs<VI_char> %0,%1,%2"
590   [(set_attr "type" "vecsimple")])
;; Float bounds compare producing an integer result vector (vcmpbfp;
;; asm template elided in this view).
592 (define_insn "altivec_vcmpbfp"
593   [(set (match_operand:V4SI 0 "register_operand" "=v")
594         (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
595                       (match_operand:V4SF 2 "register_operand" "v")]
597   "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
599   [(set_attr "type" "veccmp")])
;; Element-wise integer equality: vcmpequ{b,h,w,d}.
601 (define_insn "*altivec_eq<mode>"
602   [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
603 	(eq:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
604 		(match_operand:VI2 2 "altivec_register_operand" "v")))]
606   "vcmpequ<VI_char> %0,%1,%2"
607   [(set_attr "type" "veccmpfx")])
;; Element-wise signed greater-than: vcmpgts{b,h,w,d}.
609 (define_insn "*altivec_gt<mode>"
610   [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
611 	(gt:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
612 		(match_operand:VI2 2 "altivec_register_operand" "v")))]
614   "vcmpgts<VI_char> %0,%1,%2"
615   [(set_attr "type" "veccmpfx")])
;; Element-wise unsigned greater-than: vcmpgtu{b,h,w,d}.
617 (define_insn "*altivec_gtu<mode>"
618   [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
619 	(gtu:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
620 		 (match_operand:VI2 2 "altivec_register_operand" "v")))]
622   "vcmpgtu<VI_char> %0,%1,%2"
623   [(set_attr "type" "veccmpfx")])
;; Float compares; result mask expressed in V4SF mode (templates elided
;; in this view, presumably vcmpeqfp/vcmpgtfp/vcmpgefp -- confirm).
625 (define_insn "*altivec_eqv4sf"
626   [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
627 	(eq:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
628 		 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
629   "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
631   [(set_attr "type" "veccmp")])
633 (define_insn "*altivec_gtv4sf"
634   [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
635 	(gt:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
636 		 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
637   "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
639   [(set_attr "type" "veccmp")])
641 (define_insn "*altivec_gev4sf"
642   [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
643 	(ge:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
644 		 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
645   "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
647   [(set_attr "type" "veccmp")])
;; Bitwise select: where operand 1 is non-zero pick operand 2, else
;; operand 3.  NOTE(review): the if_then_else wrapper line and the asm
;; template (presumably vsel) are elided in this view -- confirm.
649 (define_insn "*altivec_vsel<mode>"
650   [(set (match_operand:VM 0 "altivec_register_operand" "=v")
652 	 (ne:CC (match_operand:VM 1 "altivec_register_operand" "v")
653 		(match_operand:VM 4 "zero_constant" ""))
654 	 (match_operand:VM 2 "altivec_register_operand" "v")
655 	 (match_operand:VM 3 "altivec_register_operand" "v")))]
656   "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
658   [(set_attr "type" "vecmove")])
;; Same as above with an unsigned comparison mode.
660 (define_insn "*altivec_vsel<mode>_uns"
661   [(set (match_operand:VM 0 "altivec_register_operand" "=v")
663 	 (ne:CCUNS (match_operand:VM 1 "altivec_register_operand" "v")
664 		   (match_operand:VM 4 "zero_constant" ""))
665 	 (match_operand:VM 2 "altivec_register_operand" "v")
666 	 (match_operand:VM 3 "altivec_register_operand" "v")))]
667   "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
669   [(set_attr "type" "vecmove")])
671 ;; Fused multiply add.
673 (define_insn "*altivec_fmav4sf4"
674   [(set (match_operand:V4SF 0 "register_operand" "=v")
675 	(fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
676 		  (match_operand:V4SF 2 "register_operand" "v")
677 		  (match_operand:V4SF 3 "register_operand" "v")))]
678   "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
679   "vmaddfp %0,%1,%2,%3"
680   [(set_attr "type" "vecfloat")])
682 ;; We do multiply as a fused multiply-add with an add of a -0.0 vector.
684 (define_expand "altivec_mulv4sf3"
685   [(set (match_operand:V4SF 0 "register_operand" "")
686 	(fma:V4SF (match_operand:V4SF 1 "register_operand" "")
687 		  (match_operand:V4SF 2 "register_operand" "")
689   "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
693   /* Generate [-0.0, -0.0, -0.0, -0.0].  */
;; Splat -1 then shift each word left so only the sign bit remains:
;; 0x80000000 reinterpreted as float is -0.0.
694   neg0 = gen_reg_rtx (V4SImode);
695   emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
696   emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
698   operands[3] = gen_lowpart (V4SFmode, neg0);
701 ;; 32-bit integer multiplication
702 ;; A_high = Operand_0 & 0xFFFF0000 >> 16
703 ;; A_low = Operand_0 & 0xFFFF
704 ;; B_high = Operand_1 & 0xFFFF0000 >> 16
705 ;; B_low = Operand_1 & 0xFFFF
706 ;; result = A_low * B_low + (A_high * B_low + B_high * A_low) << 16
708 ;; (define_insn "mulv4si3"
709 ;;  [(set (match_operand:V4SI 0 "register_operand" "=v")
710 ;;        (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
711 ;;                   (match_operand:V4SI 2 "register_operand" "v")))]
;; Power8 has a direct word multiply (asm template elided in this view).
712 (define_insn "mulv4si3_p8"
713   [(set (match_operand:V4SI 0 "register_operand" "=v")
714         (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
715                    (match_operand:V4SI 2 "register_operand" "v")))]
718   [(set_attr "type" "veccomplex")])
;; Expander: use the single p8 instruction when available, otherwise
;; synthesize the product from 16-bit partial products per the comment
;; block above.
720 (define_expand "mulv4si3"
721   [(use (match_operand:V4SI 0 "register_operand" ""))
722    (use (match_operand:V4SI 1 "register_operand" ""))
723    (use (match_operand:V4SI 2 "register_operand" ""))]
735   if (TARGET_P8_VECTOR)
737       emit_insn (gen_mulv4si3_p8 (operands[0], operands[1], operands[2]));
741   zero = gen_reg_rtx (V4SImode);
742   emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
;; Splat -16; used both as a rotate count and later as a shift count.
744   sixteen = gen_reg_rtx (V4SImode);
745   emit_insn (gen_altivec_vspltisw (sixteen, gen_rtx_CONST_INT (V4SImode, -16)));
;; swap = operand 2 with its halfwords exchanged within each word.
747   swap = gen_reg_rtx (V4SImode);
748   emit_insn (gen_vrotlv4si3 (swap, operands[2], sixteen));
750   one = gen_reg_rtx (V8HImode);
751   convert_move (one, operands[1], 0);
753   two = gen_reg_rtx (V8HImode);
754   convert_move (two, operands[2], 0);
756   small_swap = gen_reg_rtx (V8HImode);
757   convert_move (small_swap, swap, 0);
;; low_product = A_low * B_low (even/odd halfword multiply).
759   low_product = gen_reg_rtx (V4SImode);
760   emit_insn (gen_altivec_vmulouh (low_product, one, two));
;; high_product = A_high*B_low + A_low*B_high, via multiply-sum against
;; the halfword-swapped operand, then shifted into the high halfword.
762   high_product = gen_reg_rtx (V4SImode);
763   emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero));
765   emit_insn (gen_vashlv4si3 (high_product, high_product, sixteen));
767   emit_insn (gen_addv4si3 (operands[0], high_product, low_product));
;; Halfword multiply: vmladduhm with a zero addend.
772 (define_expand "mulv8hi3"
773   [(use (match_operand:V8HI 0 "register_operand" ""))
774    (use (match_operand:V8HI 1 "register_operand" ""))
775    (use (match_operand:V8HI 2 "register_operand" ""))]
778   rtx zero = gen_reg_rtx (V8HImode);
780   emit_insn (gen_altivec_vspltish (zero, const0_rtx));
781   emit_insn (gen_altivec_vmladduhm(operands[0], operands[1], operands[2], zero));
786 ;; Fused multiply subtract
;; NOTE(review): the negation wrappers of the fma are elided in this
;; view; the template is the negated multiply-subtract vnmsubfp.
787 (define_insn "*altivec_vnmsubfp"
788   [(set (match_operand:V4SF 0 "register_operand" "=v")
790 	 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
791 		   (match_operand:V4SF 2 "register_operand" "v")
793 		     (match_operand:V4SF 3 "register_operand" "v")))))]
794   "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
795   "vnmsubfp %0,%1,%2,%3"
796   [(set_attr "type" "vecfloat")])
;; Multiply-sum family: partial products of the narrow elements of
;; operands 1 and 2 are summed into the words of operand 3.
798 (define_insn "altivec_vmsumu<VI_char>m"
799   [(set (match_operand:V4SI 0 "register_operand" "=v")
800         (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
801 		      (match_operand:VIshort 2 "register_operand" "v")
802                       (match_operand:V4SI 3 "register_operand" "v")]
805   "vmsumu<VI_char>m %0,%1,%2,%3"
806   [(set_attr "type" "veccomplex")])
;; Mixed-sign variant.
808 (define_insn "altivec_vmsumm<VI_char>m"
809   [(set (match_operand:V4SI 0 "register_operand" "=v")
810         (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
811 		      (match_operand:VIshort 2 "register_operand" "v")
812                       (match_operand:V4SI 3 "register_operand" "v")]
815   "vmsumm<VI_char>m %0,%1,%2,%3"
816   [(set_attr "type" "veccomplex")])
;; Signed halfword multiply-sum, modulo arithmetic.
818 (define_insn "altivec_vmsumshm"
819   [(set (match_operand:V4SI 0 "register_operand" "=v")
820         (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
821 		      (match_operand:V8HI 2 "register_operand" "v")
822                       (match_operand:V4SI 3 "register_operand" "v")]
825   "vmsumshm %0,%1,%2,%3"
826   [(set_attr "type" "veccomplex")])
;; Saturating variants: these also record an update of VSCR.
828 (define_insn "altivec_vmsumuhs"
829   [(set (match_operand:V4SI 0 "register_operand" "=v")
830         (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
831 		      (match_operand:V8HI 2 "register_operand" "v")
832                       (match_operand:V4SI 3 "register_operand" "v")]
834    (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
836   "vmsumuhs %0,%1,%2,%3"
837   [(set_attr "type" "veccomplex")])
839 (define_insn "altivec_vmsumshs"
840   [(set (match_operand:V4SI 0 "register_operand" "=v")
841         (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
842 		      (match_operand:V8HI 2 "register_operand" "v")
843                       (match_operand:V4SI 3 "register_operand" "v")]
845    (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
847   "vmsumshs %0,%1,%2,%3"
848   [(set_attr "type" "veccomplex")])
;; Element-wise unsigned maximum: vmaxu{b,h,w,d}.
852 (define_insn "umax<mode>3"
853   [(set (match_operand:VI2 0 "register_operand" "=v")
854         (umax:VI2 (match_operand:VI2 1 "register_operand" "v")
855 		  (match_operand:VI2 2 "register_operand" "v")))]
857   "vmaxu<VI_char> %0,%1,%2"
858   [(set_attr "type" "vecsimple")])
;; Element-wise signed maximum: vmaxs{b,h,w,d}.
860 (define_insn "smax<mode>3"
861   [(set (match_operand:VI2 0 "register_operand" "=v")
862         (smax:VI2 (match_operand:VI2 1 "register_operand" "v")
863 		  (match_operand:VI2 2 "register_operand" "v")))]
865   "vmaxs<VI_char> %0,%1,%2"
866   [(set_attr "type" "vecsimple")])
;; Float maximum (asm template elided in this view).
868 (define_insn "*altivec_smaxv4sf3"
869   [(set (match_operand:V4SF 0 "register_operand" "=v")
870         (smax:V4SF (match_operand:V4SF 1 "register_operand" "v")
871                    (match_operand:V4SF 2 "register_operand" "v")))]
872   "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
874   [(set_attr "type" "veccmp")])
;; Element-wise unsigned minimum: vminu{b,h,w,d}.
876 (define_insn "umin<mode>3"
877   [(set (match_operand:VI2 0 "register_operand" "=v")
878         (umin:VI2 (match_operand:VI2 1 "register_operand" "v")
879 		  (match_operand:VI2 2 "register_operand" "v")))]
881   "vminu<VI_char> %0,%1,%2"
882   [(set_attr "type" "vecsimple")])
;; Element-wise signed minimum: vmins{b,h,w,d}.
884 (define_insn "smin<mode>3"
885   [(set (match_operand:VI2 0 "register_operand" "=v")
886         (smin:VI2 (match_operand:VI2 1 "register_operand" "v")
887 		  (match_operand:VI2 2 "register_operand" "v")))]
889   "vmins<VI_char> %0,%1,%2"
890   [(set_attr "type" "vecsimple")])
;; Float minimum (asm template elided in this view).
892 (define_insn "*altivec_sminv4sf3"
893   [(set (match_operand:V4SF 0 "register_operand" "=v")
894         (smin:V4SF (match_operand:V4SF 1 "register_operand" "v")
895                    (match_operand:V4SF 2 "register_operand" "v")))]
896   "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
898   [(set_attr "type" "veccmp")])
;; Multiply-high and add, signed saturating; updates VSCR.
900 (define_insn "altivec_vmhaddshs"
901   [(set (match_operand:V8HI 0 "register_operand" "=v")
902         (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
903 		      (match_operand:V8HI 2 "register_operand" "v")
904                       (match_operand:V8HI 3 "register_operand" "v")]
906    (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
908   "vmhaddshs %0,%1,%2,%3"
909   [(set_attr "type" "veccomplex")])
;; Rounding variant of the above.
911 (define_insn "altivec_vmhraddshs"
912   [(set (match_operand:V8HI 0 "register_operand" "=v")
913         (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
914 		      (match_operand:V8HI 2 "register_operand" "v")
915                       (match_operand:V8HI 3 "register_operand" "v")]
917    (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
919   "vmhraddshs %0,%1,%2,%3"
920   [(set_attr "type" "veccomplex")])
;; Halfword multiply-low and add, modulo arithmetic; expressed with real
;; RTL (mult/plus) rather than an unspec so the optimizers can see it.
922 (define_insn "altivec_vmladduhm"
923   [(set (match_operand:V8HI 0 "register_operand" "=v")
924         (plus:V8HI (mult:V8HI (match_operand:V8HI 1 "register_operand" "v")
925 			      (match_operand:V8HI 2 "register_operand" "v"))
926 		   (match_operand:V8HI 3 "register_operand" "v")))]
928   "vmladduhm %0,%1,%2,%3"
929   [(set_attr "type" "veccomplex")])
;; Merge-high byte: interleave the high halves of operands 1 and 2.
;; Expanded as a vec_select over the concatenation of both inputs.
931 (define_expand "altivec_vmrghb"
932   [(use (match_operand:V16QI 0 "register_operand" ""))
933    (use (match_operand:V16QI 1 "register_operand" ""))
934    (use (match_operand:V16QI 2 "register_operand" ""))]
940   /* Special handling for LE with -maltivec=be.  */
941   if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
;; Select the low-half indices from the swapped concatenation so the
;; big-endian element order is preserved on little-endian.
943       v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
944                      GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
945 		     GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
946 		     GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
947       x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
951       v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
952                      GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
953 		     GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
954 		     GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
955       x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
958   x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
959   emit_insn (gen_rtx_SET (operands[0], x));
;; Matching insn; on little-endian the same RTL is implemented by the
;; opposite merge instruction with swapped operands.
;; NOTE(review): the vec_select/vec_concat header lines are elided here.
963 (define_insn "*altivec_vmrghb_internal"
964   [(set (match_operand:V16QI 0 "register_operand" "=v")
967 	   (match_operand:V16QI 1 "register_operand" "v")
968 	   (match_operand:V16QI 2 "register_operand" "v"))
969 	  (parallel [(const_int 0) (const_int 16)
970 		     (const_int 1) (const_int 17)
971 		     (const_int 2) (const_int 18)
972 		     (const_int 3) (const_int 19)
973 		     (const_int 4) (const_int 20)
974 		     (const_int 5) (const_int 21)
975 		     (const_int 6) (const_int 22)
976 		     (const_int 7) (const_int 23)])))]
979   if (BYTES_BIG_ENDIAN)
980     return "vmrghb %0,%1,%2";
982     return "vmrglb %0,%2,%1";
984   [(set_attr "type" "vecperm")])
;; Builtin-facing form that always emits vmrghb regardless of endianness.
986 (define_insn "altivec_vmrghb_direct"
987   [(set (match_operand:V16QI 0 "register_operand" "=v")
988         (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
989         	       (match_operand:V16QI 2 "register_operand" "v")]
990 		      UNSPEC_VMRGH_DIRECT))]
993   [(set_attr "type" "vecperm")])
;; Merge-high halfword: same scheme as vmrghb, with 8 elements.
995 (define_expand "altivec_vmrghh"
996   [(use (match_operand:V8HI 0 "register_operand" ""))
997    (use (match_operand:V8HI 1 "register_operand" ""))
998    (use (match_operand:V8HI 2 "register_operand" ""))]
1004   /* Special handling for LE with -maltivec=be.  */
1005   if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1007       v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
1008                      GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
1009       x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
1013       v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
1014                      GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
1015       x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1018   x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1019   emit_insn (gen_rtx_SET (operands[0], x));
;; Matching insn; LE uses the opposite merge with swapped operands.
;; NOTE(review): header lines of the vec_select and part of the index
;; parallel appear elided in this view.
1023 (define_insn "*altivec_vmrghh_internal"
1024   [(set (match_operand:V8HI 0 "register_operand" "=v")
1027 	   (match_operand:V8HI 1 "register_operand" "v")
1028 	   (match_operand:V8HI 2 "register_operand" "v"))
1029 	  (parallel [(const_int 0) (const_int 8)
1030 		     (const_int 1) (const_int 9)
1031 		     (const_int 2) (const_int 10)
1032 		     (const_int 3) (const_int 11)])))]
1035   if (BYTES_BIG_ENDIAN)
1036     return "vmrghh %0,%1,%2";
1038     return "vmrglh %0,%2,%1";
1040   [(set_attr "type" "vecperm")])
;; Builtin-facing form that always emits vmrghh.
1042 (define_insn "altivec_vmrghh_direct"
1043   [(set (match_operand:V8HI 0 "register_operand" "=v")
1044         (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1045 		      (match_operand:V8HI 2 "register_operand" "v")]
1046 		     UNSPEC_VMRGH_DIRECT))]
1049   [(set_attr "type" "vecperm")])
;; Merge-high word: same scheme as vmrghb, with 4 elements.
1051 (define_expand "altivec_vmrghw"
1052   [(use (match_operand:V4SI 0 "register_operand" ""))
1053    (use (match_operand:V4SI 1 "register_operand" ""))
1054    (use (match_operand:V4SI 2 "register_operand" ""))]
1055   "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1060   /* Special handling for LE with -maltivec=be.  */
1061   if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1063       v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1064       x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
1068       v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1069       x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1072   x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1073   emit_insn (gen_rtx_SET (operands[0], x));
;; Matching insn; LE uses vmrglw with swapped operands.
;; NOTE(review): vec_select/vec_concat header lines elided in this view.
1077 (define_insn "*altivec_vmrghw_internal"
1078   [(set (match_operand:V4SI 0 "register_operand" "=v")
1081 	   (match_operand:V4SI 1 "register_operand" "v")
1082 	   (match_operand:V4SI 2 "register_operand" "v"))
1083 	  (parallel [(const_int 0) (const_int 4)
1084 		     (const_int 1) (const_int 5)])))]
1085   "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1087   if (BYTES_BIG_ENDIAN)
1088     return "vmrghw %0,%1,%2";
1090     return "vmrglw %0,%2,%1";
1092   [(set_attr "type" "vecperm")])
;; Builtin-facing form that always emits vmrghw.
1094 (define_insn "altivec_vmrghw_direct"
1095   [(set (match_operand:V4SI 0 "register_operand" "=v")
1096         (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1097 		      (match_operand:V4SI 2 "register_operand" "v")]
1098 		     UNSPEC_VMRGH_DIRECT))]
1101   [(set_attr "type" "vecperm")])
;; Float flavor of merge-high word; same vmrghw/vmrglw instructions.
1103 (define_insn "*altivec_vmrghsf"
1104   [(set (match_operand:V4SF 0 "register_operand" "=v")
1107 	   (match_operand:V4SF 1 "register_operand" "v")
1108 	   (match_operand:V4SF 2 "register_operand" "v"))
1109 	  (parallel [(const_int 0) (const_int 4)
1110 		     (const_int 1) (const_int 5)])))]
1111   "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1113   if (BYTES_BIG_ENDIAN)
1114     return "vmrghw %0,%1,%2";
1116     return "vmrglw %0,%2,%1";
1118   [(set_attr "type" "vecperm")])
;; Merge-low byte: interleave the low halves of operands 1 and 2;
;; mirror image of altivec_vmrghb above.
1120 (define_expand "altivec_vmrglb"
1121   [(use (match_operand:V16QI 0 "register_operand" ""))
1122    (use (match_operand:V16QI 1 "register_operand" ""))
1123    (use (match_operand:V16QI 2 "register_operand" ""))]
1129   /* Special handling for LE with -maltivec=be.  */
1130   if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1132       v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
1133                      GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
1134 		     GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
1135 		     GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
1136       x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
1140       v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
1141                      GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
1142 		     GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
1143 		     GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
1144       x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
1147   x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1148   emit_insn (gen_rtx_SET (operands[0], x));
;; Matching insn; LE uses vmrghb with swapped operands.
;; NOTE(review): vec_select/vec_concat header lines elided in this view.
1152 (define_insn "*altivec_vmrglb_internal"
1153   [(set (match_operand:V16QI 0 "register_operand" "=v")
1156 	   (match_operand:V16QI 1 "register_operand" "v")
1157 	   (match_operand:V16QI 2 "register_operand" "v"))
1158 	  (parallel [(const_int  8) (const_int 24)
1159 		     (const_int  9) (const_int 25)
1160 		     (const_int 10) (const_int 26)
1161 		     (const_int 11) (const_int 27)
1162 		     (const_int 12) (const_int 28)
1163 		     (const_int 13) (const_int 29)
1164 		     (const_int 14) (const_int 30)
1165 		     (const_int 15) (const_int 31)])))]
1168   if (BYTES_BIG_ENDIAN)
1169     return "vmrglb %0,%1,%2";
1171     return "vmrghb %0,%2,%1";
1173   [(set_attr "type" "vecperm")])
;; Builtin-facing form that always emits vmrglb.
1175 (define_insn "altivec_vmrglb_direct"
1176   [(set (match_operand:V16QI 0 "register_operand" "=v")
1177         (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1178     		       (match_operand:V16QI 2 "register_operand" "v")]
1179                       UNSPEC_VMRGL_DIRECT))]
1182   [(set_attr "type" "vecperm")])
;; Expander for vmrglh (merge low, halfwords).  Same endian scheme as
;; vmrglb: LE with -maltivec=be interleaves elements 0..3 of a swapped
;; concat, otherwise elements 4..7 of the normal concat.
1184 (define_expand "altivec_vmrglh"
1185 [(use (match_operand:V8HI 0 "register_operand" ""))
1186 (use (match_operand:V8HI 1 "register_operand" ""))
1187 (use (match_operand:V8HI 2 "register_operand" ""))]
1193 /* Special handling for LE with -maltivec=be. */
1194 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1196 v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
1197 GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
1198 x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
1202 v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
1203 GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
1204 x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1207 x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1208 emit_insn (gen_rtx_SET (operands[0], x));
;; Canonical RTL form of vmrglh; on true LE emit vmrghh with swapped
;; sources.
1212 (define_insn "*altivec_vmrglh_internal"
1213 [(set (match_operand:V8HI 0 "register_operand" "=v")
1216 (match_operand:V8HI 1 "register_operand" "v")
1217 (match_operand:V8HI 2 "register_operand" "v"))
1218 (parallel [(const_int 4) (const_int 12)
1219 (const_int 5) (const_int 13)
1220 (const_int 6) (const_int 14)
1221 (const_int 7) (const_int 15)])))]
1224 if (BYTES_BIG_ENDIAN)
1225 return "vmrglh %0,%1,%2";
1227 return "vmrghh %0,%2,%1";
1229 [(set_attr "type" "vecperm")])
;; Opaque form: always emits vmrglh, operands in given order.
1231 (define_insn "altivec_vmrglh_direct"
1232 [(set (match_operand:V8HI 0 "register_operand" "=v")
1233 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1234 (match_operand:V8HI 2 "register_operand" "v")]
1235 UNSPEC_VMRGL_DIRECT))]
1238 [(set_attr "type" "vecperm")])
;; Expander for vmrglw (merge low, words).  Same endian scheme as the
;; byte/halfword variants, with 4-element selections.
1240 (define_expand "altivec_vmrglw"
1241 [(use (match_operand:V4SI 0 "register_operand" ""))
1242 (use (match_operand:V4SI 1 "register_operand" ""))
1243 (use (match_operand:V4SI 2 "register_operand" ""))]
1244 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1249 /* Special handling for LE with -maltivec=be. */
1250 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1252 v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1253 x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
1257 v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1258 x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1261 x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1262 emit_insn (gen_rtx_SET (operands[0], x));
;; Canonical RTL form of vmrglw; on true LE emit vmrghw with swapped
;; sources.
1266 (define_insn "*altivec_vmrglw_internal"
1267 [(set (match_operand:V4SI 0 "register_operand" "=v")
1270 (match_operand:V4SI 1 "register_operand" "v")
1271 (match_operand:V4SI 2 "register_operand" "v"))
1272 (parallel [(const_int 2) (const_int 6)
1273 (const_int 3) (const_int 7)])))]
1274 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1276 if (BYTES_BIG_ENDIAN)
1277 return "vmrglw %0,%1,%2";
1279 return "vmrghw %0,%2,%1";
1281 [(set_attr "type" "vecperm")])
;; Opaque form: always emits vmrglw, operands in given order.
1283 (define_insn "altivec_vmrglw_direct"
1284 [(set (match_operand:V4SI 0 "register_operand" "=v")
1285 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1286 (match_operand:V4SI 2 "register_operand" "v")]
1287 UNSPEC_VMRGL_DIRECT))]
1290 [(set_attr "type" "vecperm")])
;; Merge-low word for V4SF (same selection as *altivec_vmrglw_internal,
;; elements 2,6,3,7 of the concat); LE emits vmrghw with swapped sources.
1292 (define_insn "*altivec_vmrglsf"
1293 [(set (match_operand:V4SF 0 "register_operand" "=v")
1296 (match_operand:V4SF 1 "register_operand" "v")
1297 (match_operand:V4SF 2 "register_operand" "v"))
1298 (parallel [(const_int 2) (const_int 6)
1299 (const_int 3) (const_int 7)])))]
1300 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1302 if (BYTES_BIG_ENDIAN)
1303 return "vmrglw %0,%1,%2";
1305 return "vmrghw %0,%2,%1";
1307 [(set_attr "type" "vecperm")])
1309 ;; Power8 vector merge even/odd
;; Merge even words: elements 0,4,2,6 of the concat of ops 1 and 2.
;; On LE the even/odd roles flip, so emit vmrgow with swapped sources.
1310 (define_insn "p8_vmrgew"
1311 [(set (match_operand:V4SI 0 "register_operand" "=v")
1314 (match_operand:V4SI 1 "register_operand" "v")
1315 (match_operand:V4SI 2 "register_operand" "v"))
1316 (parallel [(const_int 0) (const_int 4)
1317 (const_int 2) (const_int 6)])))]
1320 if (BYTES_BIG_ENDIAN)
1321 return "vmrgew %0,%1,%2";
1323 return "vmrgow %0,%2,%1";
1325 [(set_attr "type" "vecperm")])
;; Merge odd words: elements 1,5,3,7; LE emits vmrgew with swapped
;; sources (mirror of p8_vmrgew).
1327 (define_insn "p8_vmrgow"
1328 [(set (match_operand:V4SI 0 "register_operand" "=v")
1331 (match_operand:V4SI 1 "register_operand" "v")
1332 (match_operand:V4SI 2 "register_operand" "v"))
1333 (parallel [(const_int 1) (const_int 5)
1334 (const_int 3) (const_int 7)])))]
1337 if (BYTES_BIG_ENDIAN)
1338 return "vmrgow %0,%1,%2";
1340 return "vmrgew %0,%2,%1";
1342 [(set_attr "type" "vecperm")])
;; Opaque V4SF merge-even: always emits vmrgew regardless of endianness.
1344 (define_insn "p8_vmrgew_v4sf_direct"
1345 [(set (match_operand:V4SF 0 "register_operand" "=v")
1346 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")
1347 (match_operand:V4SF 2 "register_operand" "v")]
1348 UNSPEC_VMRGEW_DIRECT))]
1351 [(set_attr "type" "vecperm")])
;; Widening multiply expanders.  "Even"/"odd" refer to GCC's element
;; numbering; in BE element order that matches the hardware vmule*/
;; vmulo* directly, while in LE order the even/odd instructions swap.
;; Widen-multiply even unsigned bytes -> V8HI.
1353 (define_expand "vec_widen_umult_even_v16qi"
1354 [(use (match_operand:V8HI 0 "register_operand" ""))
1355 (use (match_operand:V16QI 1 "register_operand" ""))
1356 (use (match_operand:V16QI 2 "register_operand" ""))]
1359 if (VECTOR_ELT_ORDER_BIG)
1360 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1362 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
;; Widen-multiply even signed bytes -> V8HI.
1366 (define_expand "vec_widen_smult_even_v16qi"
1367 [(use (match_operand:V8HI 0 "register_operand" ""))
1368 (use (match_operand:V16QI 1 "register_operand" ""))
1369 (use (match_operand:V16QI 2 "register_operand" ""))]
1372 if (VECTOR_ELT_ORDER_BIG)
1373 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1375 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
;; Widen-multiply even unsigned halfwords -> V4SI.
1379 (define_expand "vec_widen_umult_even_v8hi"
1380 [(use (match_operand:V4SI 0 "register_operand" ""))
1381 (use (match_operand:V8HI 1 "register_operand" ""))
1382 (use (match_operand:V8HI 2 "register_operand" ""))]
1385 if (VECTOR_ELT_ORDER_BIG)
1386 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1388 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
;; Widen-multiply even signed halfwords -> V4SI.
1392 (define_expand "vec_widen_smult_even_v8hi"
1393 [(use (match_operand:V4SI 0 "register_operand" ""))
1394 (use (match_operand:V8HI 1 "register_operand" ""))
1395 (use (match_operand:V8HI 2 "register_operand" ""))]
1398 if (VECTOR_ELT_ORDER_BIG)
1399 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1401 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
;; Widen-multiply odd unsigned bytes -> V8HI (even instruction in LE order).
1405 (define_expand "vec_widen_umult_odd_v16qi"
1406 [(use (match_operand:V8HI 0 "register_operand" ""))
1407 (use (match_operand:V16QI 1 "register_operand" ""))
1408 (use (match_operand:V16QI 2 "register_operand" ""))]
1411 if (VECTOR_ELT_ORDER_BIG)
1412 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1414 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
;; Widen-multiply odd signed bytes -> V8HI.
1418 (define_expand "vec_widen_smult_odd_v16qi"
1419 [(use (match_operand:V8HI 0 "register_operand" ""))
1420 (use (match_operand:V16QI 1 "register_operand" ""))
1421 (use (match_operand:V16QI 2 "register_operand" ""))]
1424 if (VECTOR_ELT_ORDER_BIG)
1425 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1427 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
;; Widen-multiply odd unsigned halfwords -> V4SI.
1431 (define_expand "vec_widen_umult_odd_v8hi"
1432 [(use (match_operand:V4SI 0 "register_operand" ""))
1433 (use (match_operand:V8HI 1 "register_operand" ""))
1434 (use (match_operand:V8HI 2 "register_operand" ""))]
1437 if (VECTOR_ELT_ORDER_BIG)
1438 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1440 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
;; Widen-multiply odd signed halfwords -> V4SI.
1444 (define_expand "vec_widen_smult_odd_v8hi"
1445 [(use (match_operand:V4SI 0 "register_operand" ""))
1446 (use (match_operand:V8HI 1 "register_operand" ""))
1447 (use (match_operand:V8HI 2 "register_operand" ""))]
1450 if (VECTOR_ELT_ORDER_BIG)
1451 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1453 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]))
;; Underlying vmule*/vmulo* insns used by the expanders above.  Each is
;; an unspec taking two narrow vectors and producing the widened result.
;; NOTE(review): the unspec names, conditions and output templates of
;; these patterns are not visible in this chunk — confirm in full file.
;; Even unsigned byte multiply -> V8HI (vmuleub).
1457 (define_insn "altivec_vmuleub"
1458 [(set (match_operand:V8HI 0 "register_operand" "=v")
1459 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1460 (match_operand:V16QI 2 "register_operand" "v")]
1464 [(set_attr "type" "veccomplex")])
;; Odd unsigned byte multiply -> V8HI (vmuloub).
1466 (define_insn "altivec_vmuloub"
1467 [(set (match_operand:V8HI 0 "register_operand" "=v")
1468 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1469 (match_operand:V16QI 2 "register_operand" "v")]
1473 [(set_attr "type" "veccomplex")])
;; Even signed byte multiply -> V8HI (vmulesb).
1475 (define_insn "altivec_vmulesb"
1476 [(set (match_operand:V8HI 0 "register_operand" "=v")
1477 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1478 (match_operand:V16QI 2 "register_operand" "v")]
1482 [(set_attr "type" "veccomplex")])
;; Odd signed byte multiply -> V8HI (vmulosb).
1484 (define_insn "altivec_vmulosb"
1485 [(set (match_operand:V8HI 0 "register_operand" "=v")
1486 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1487 (match_operand:V16QI 2 "register_operand" "v")]
1491 [(set_attr "type" "veccomplex")])
;; Even unsigned halfword multiply -> V4SI (vmuleuh).
1493 (define_insn "altivec_vmuleuh"
1494 [(set (match_operand:V4SI 0 "register_operand" "=v")
1495 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1496 (match_operand:V8HI 2 "register_operand" "v")]
1500 [(set_attr "type" "veccomplex")])
;; Odd unsigned halfword multiply -> V4SI (vmulouh).
1502 (define_insn "altivec_vmulouh"
1503 [(set (match_operand:V4SI 0 "register_operand" "=v")
1504 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1505 (match_operand:V8HI 2 "register_operand" "v")]
1509 [(set_attr "type" "veccomplex")])
;; Even signed halfword multiply -> V4SI (vmulesh).
1511 (define_insn "altivec_vmulesh"
1512 [(set (match_operand:V4SI 0 "register_operand" "=v")
1513 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1514 (match_operand:V8HI 2 "register_operand" "v")]
1518 [(set_attr "type" "veccomplex")])
;; Odd signed halfword multiply -> V4SI (vmulosh).
1520 (define_insn "altivec_vmulosh"
1521 [(set (match_operand:V4SI 0 "register_operand" "=v")
1522 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1523 (match_operand:V8HI 2 "register_operand" "v")]
1527 [(set_attr "type" "veccomplex")])
1530 ;; Vector pack/unpack
;; Pack pixel: for LE element order the operands are swapped so the
;; packed halves land in the expected positions.
1531 (define_insn "altivec_vpkpx"
1532 [(set (match_operand:V8HI 0 "register_operand" "=v")
1533 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
1534 (match_operand:V4SI 2 "register_operand" "v")]
1539 if (VECTOR_ELT_ORDER_BIG)
1540 return \"vpkpx %0,%1,%2\";
1542 return \"vpkpx %0,%2,%1\";
1544 [(set_attr "type" "vecperm")])
;; Pack signed -> signed with saturation; LE order swaps operands.
1546 (define_insn "altivec_vpks<VI_char>ss"
1547 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1548 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1549 (match_operand:VP 2 "register_operand" "v")]
1550 UNSPEC_VPACK_SIGN_SIGN_SAT))]
1554 if (VECTOR_ELT_ORDER_BIG)
1555 return \"vpks<VI_char>ss %0,%1,%2\";
1557 return \"vpks<VI_char>ss %0,%2,%1\";
1559 [(set_attr "type" "vecperm")])
;; Pack signed -> unsigned with saturation; LE order swaps operands.
1561 (define_insn "altivec_vpks<VI_char>us"
1562 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1563 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1564 (match_operand:VP 2 "register_operand" "v")]
1565 UNSPEC_VPACK_SIGN_UNS_SAT))]
1569 if (VECTOR_ELT_ORDER_BIG)
1570 return \"vpks<VI_char>us %0,%1,%2\";
1572 return \"vpks<VI_char>us %0,%2,%1\";
1574 [(set_attr "type" "vecperm")])
;; Pack unsigned -> unsigned with saturation; LE order swaps operands.
1576 (define_insn "altivec_vpku<VI_char>us"
1577 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1578 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1579 (match_operand:VP 2 "register_operand" "v")]
1580 UNSPEC_VPACK_UNS_UNS_SAT))]
1584 if (VECTOR_ELT_ORDER_BIG)
1585 return \"vpku<VI_char>us %0,%1,%2\";
1587 return \"vpku<VI_char>us %0,%2,%1\";
1589 [(set_attr "type" "vecperm")])
;; Pack unsigned modulo (truncate); LE order swaps operands.
1591 (define_insn "altivec_vpku<VI_char>um"
1592 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1593 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1594 (match_operand:VP 2 "register_operand" "v")]
1595 UNSPEC_VPACK_UNS_UNS_MOD))]
1599 if (VECTOR_ELT_ORDER_BIG)
1600 return \"vpku<VI_char>um %0,%1,%2\";
1602 return \"vpku<VI_char>um %0,%2,%1\";
1604 [(set_attr "type" "vecperm")])
;; Direct modulo pack: keyed on true endianness (BYTES_BIG_ENDIAN),
;; not the -maltivec=be element-order view.
1606 (define_insn "altivec_vpku<VI_char>um_direct"
1607 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1608 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1609 (match_operand:VP 2 "register_operand" "v")]
1610 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT))]
1614 if (BYTES_BIG_ENDIAN)
1615 return \"vpku<VI_char>um %0,%1,%2\";
1617 return \"vpku<VI_char>um %0,%2,%1\";
1619 [(set_attr "type" "vecperm")])
;; Element-wise rotate left; rotate amounts come from the matching
;; elements of operand 2, so no endian fixup is needed.
1621 (define_insn "*altivec_vrl<VI_char>"
1622 [(set (match_operand:VI2 0 "register_operand" "=v")
1623 (rotate:VI2 (match_operand:VI2 1 "register_operand" "v")
1624 (match_operand:VI2 2 "register_operand" "v")))]
1626 "vrl<VI_char> %0,%1,%2"
1627 [(set_attr "type" "vecsimple")])
;; Rotate-left then mask-insert into operand 0 (tied to operand 1 via
;; the "0" constraint, so the previous value is read-modify-written).
1629 (define_insn "altivec_vrl<VI_char>mi"
1630 [(set (match_operand:VIlong 0 "register_operand" "=v")
1631 (unspec:VIlong [(match_operand:VIlong 1 "register_operand" "0")
1632 (match_operand:VIlong 2 "register_operand" "v")
1633 (match_operand:VIlong 3 "register_operand" "v")]
1636 "vrl<VI_char>mi %0,%2,%3"
1637 [(set_attr "type" "veclogical")])
;; Rotate-left then AND with mask (vrlwnm/vrldnm).
1639 (define_insn "altivec_vrl<VI_char>nm"
1640 [(set (match_operand:VIlong 0 "register_operand" "=v")
1641 (unspec:VIlong [(match_operand:VIlong 1 "register_operand" "v")
1642 (match_operand:VIlong 2 "register_operand" "v")]
1645 "vrl<VI_char>nm %0,%1,%2"
1646 [(set_attr "type" "veclogical")])
;; Shift entire vector left by bits (vsl); opaque unspec form.
1648 (define_insn "altivec_vsl"
1649 [(set (match_operand:V4SI 0 "register_operand" "=v")
1650 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1651 (match_operand:V4SI 2 "register_operand" "v")]
1655 [(set_attr "type" "vecperm")])
;; Shift entire vector left by octets (vslo).
1657 (define_insn "altivec_vslo"
1658 [(set (match_operand:V4SI 0 "register_operand" "=v")
1659 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1660 (match_operand:V4SI 2 "register_operand" "v")]
1664 [(set_attr "type" "vecperm")])
;; NOTE(review): the define_insn header for this pattern is not visible
;; in this chunk; V16QI unspec operands suggest a variable byte shift
;; (likely vslv) — confirm against the full file.
1667 [(set (match_operand:V16QI 0 "register_operand" "=v")
1668 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1669 (match_operand:V16QI 2 "register_operand" "v")]
1673 [(set_attr "type" "vecsimple")])
;; NOTE(review): header line also missing here; likely the matching
;; right-shift variant (vsrv) — confirm against the full file.
1676 [(set (match_operand:V16QI 0 "register_operand" "=v")
1677 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1678 (match_operand:V16QI 2 "register_operand" "v")]
1682 [(set_attr "type" "vecsimple")])
;; Element-wise logical shift left.
1684 (define_insn "*altivec_vsl<VI_char>"
1685 [(set (match_operand:VI2 0 "register_operand" "=v")
1686 (ashift:VI2 (match_operand:VI2 1 "register_operand" "v")
1687 (match_operand:VI2 2 "register_operand" "v")))]
1689 "vsl<VI_char> %0,%1,%2"
1690 [(set_attr "type" "vecsimple")])
;; Element-wise logical shift right.
1692 (define_insn "*altivec_vsr<VI_char>"
1693 [(set (match_operand:VI2 0 "register_operand" "=v")
1694 (lshiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1695 (match_operand:VI2 2 "register_operand" "v")))]
1697 "vsr<VI_char> %0,%1,%2"
1698 [(set_attr "type" "vecsimple")])
;; Element-wise arithmetic shift right.
1700 (define_insn "*altivec_vsra<VI_char>"
1701 [(set (match_operand:VI2 0 "register_operand" "=v")
1702 (ashiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1703 (match_operand:VI2 2 "register_operand" "v")))]
1705 "vsra<VI_char> %0,%1,%2"
1706 [(set_attr "type" "vecsimple")])
;; Shift entire vector right by bits (vsr); opaque unspec form.
1708 (define_insn "altivec_vsr"
1709 [(set (match_operand:V4SI 0 "register_operand" "=v")
1710 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1711 (match_operand:V4SI 2 "register_operand" "v")]
1715 [(set_attr "type" "vecperm")])
;; Shift entire vector right by octets (vsro).
1717 (define_insn "altivec_vsro"
1718 [(set (match_operand:V4SI 0 "register_operand" "=v")
1719 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1720 (match_operand:V4SI 2 "register_operand" "v")]
1724 [(set_attr "type" "vecperm")])
;; Partial sums of unsigned bytes into words; also sets VSCR (saturation
;; flag), modeled by the parallel set of VSCR_REGNO.
1726 (define_insn "altivec_vsum4ubs"
1727 [(set (match_operand:V4SI 0 "register_operand" "=v")
1728 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")
1729 (match_operand:V4SI 2 "register_operand" "v")]
1731 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1734 [(set_attr "type" "veccomplex")])
;; Signed variant over byte or halfword elements (VIshort iterator).
1736 (define_insn "altivec_vsum4s<VI_char>s"
1737 [(set (match_operand:V4SI 0 "register_operand" "=v")
1738 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1739 (match_operand:V4SI 2 "register_operand" "v")]
1741 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1743 "vsum4s<VI_char>s %0,%1,%2"
1744 [(set_attr "type" "veccomplex")])
1746 ;; FIXME: For the following two patterns, the scratch should only be
1747 ;; allocated for !VECTOR_ELT_ORDER_BIG, and the instructions should
1748 ;; be emitted separately.
;; Sum across half-vectors.  In LE element order the accumulator words
;; must be rotated into place before/after the hardware vsum2sws, hence
;; the 3-insn sequence and the conditional length attribute.
1749 (define_insn "altivec_vsum2sws"
1750 [(set (match_operand:V4SI 0 "register_operand" "=v")
1751 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1752 (match_operand:V4SI 2 "register_operand" "v")]
1754 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))
1755 (clobber (match_scratch:V4SI 3 "=v"))]
1758 if (VECTOR_ELT_ORDER_BIG)
1759 return "vsum2sws %0,%1,%2";
1761 return "vsldoi %3,%2,%2,12\n\tvsum2sws %3,%1,%3\n\tvsldoi %0,%3,%3,4";
1763 [(set_attr "type" "veccomplex")
1764 (set (attr "length")
1766 (match_test "VECTOR_ELT_ORDER_BIG")
1768 (const_string "12")))])
;; Full sum across vector; same LE fixup strategy (splat the
;; accumulator word, sum, then rotate the result into element 3).
1770 (define_insn "altivec_vsumsws"
1771 [(set (match_operand:V4SI 0 "register_operand" "=v")
1772 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1773 (match_operand:V4SI 2 "register_operand" "v")]
1775 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))
1776 (clobber (match_scratch:V4SI 3 "=v"))]
1779 if (VECTOR_ELT_ORDER_BIG)
1780 return "vsumsws %0,%1,%2";
1782 return "vspltw %3,%2,0\n\tvsumsws %3,%1,%3\n\tvsldoi %0,%3,%3,12";
1784 [(set_attr "type" "veccomplex")
1785 (set (attr "length")
1787 (match_test "(VECTOR_ELT_ORDER_BIG)")
1789 (const_string "12")))])
;; Opaque form: always a single vsumsws, no endian fixup.
1791 (define_insn "altivec_vsumsws_direct"
1792 [(set (match_operand:V4SI 0 "register_operand" "=v")
1793 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1794 (match_operand:V4SI 2 "register_operand" "v")]
1795 UNSPEC_VSUMSWS_DIRECT))
1796 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1799 [(set_attr "type" "veccomplex")])
;; Splat byte: broadcast element 2 of operand 1 to all 16 lanes.  For
;; LE with -maltivec=be the index is mirrored (15 - n) so the RTL
;; records the element actually selected under BE numbering.
1801 (define_expand "altivec_vspltb"
1802 [(use (match_operand:V16QI 0 "register_operand" ""))
1803 (use (match_operand:V16QI 1 "register_operand" ""))
1804 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1810 /* Special handling for LE with -maltivec=be. We have to reflect
1811 the actual selected index for the splat in the RTL. */
1812 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1813 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
1815 v = gen_rtvec (1, operands[2]);
1816 x = gen_rtx_VEC_SELECT (QImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1817 x = gen_rtx_VEC_DUPLICATE (V16QImode, x);
1818 emit_insn (gen_rtx_SET (operands[0], x));
;; Matching insn: mirrors the index again for true LE since vspltb
;; itself uses big-endian element numbering.
1822 (define_insn "*altivec_vspltb_internal"
1823 [(set (match_operand:V16QI 0 "register_operand" "=v")
1824 (vec_duplicate:V16QI
1825 (vec_select:QI (match_operand:V16QI 1 "register_operand" "v")
1827 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
1830 /* For true LE, this adjusts the selected index. For LE with
1831 -maltivec=be, this reverses what was done in the define_expand
1832 because the instruction already has big-endian bias. */
1833 if (!BYTES_BIG_ENDIAN)
1834 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
1836 return "vspltb %0,%1,%2";
1838 [(set_attr "type" "vecperm")])
;; Opaque form: index passed straight through, no endian adjustment.
1840 (define_insn "altivec_vspltb_direct"
1841 [(set (match_operand:V16QI 0 "register_operand" "=v")
1842 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1843 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1844 UNSPEC_VSPLT_DIRECT))]
1847 [(set_attr "type" "vecperm")])
;; Splat halfword: same scheme as vspltb but over 8 lanes (index
;; mirrored as 7 - n).
1849 (define_expand "altivec_vsplth"
1850 [(use (match_operand:V8HI 0 "register_operand" ""))
1851 (use (match_operand:V8HI 1 "register_operand" ""))
1852 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1858 /* Special handling for LE with -maltivec=be. We have to reflect
1859 the actual selected index for the splat in the RTL. */
1860 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1861 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
1863 v = gen_rtvec (1, operands[2]);
1864 x = gen_rtx_VEC_SELECT (HImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1865 x = gen_rtx_VEC_DUPLICATE (V8HImode, x);
1866 emit_insn (gen_rtx_SET (operands[0], x));
;; Matching insn; mirrors the index for true LE (vsplth is BE-biased).
1870 (define_insn "*altivec_vsplth_internal"
1871 [(set (match_operand:V8HI 0 "register_operand" "=v")
1873 (vec_select:HI (match_operand:V8HI 1 "register_operand" "v")
1875 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
1878 /* For true LE, this adjusts the selected index. For LE with
1879 -maltivec=be, this reverses what was done in the define_expand
1880 because the instruction already has big-endian bias. */
1881 if (!BYTES_BIG_ENDIAN)
1882 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
1884 return "vsplth %0,%1,%2";
1886 [(set_attr "type" "vecperm")])
;; Opaque form: no endian adjustment of the index.
1888 (define_insn "altivec_vsplth_direct"
1889 [(set (match_operand:V8HI 0 "register_operand" "=v")
1890 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1891 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1892 UNSPEC_VSPLT_DIRECT))]
1895 [(set_attr "type" "vecperm")])
;; Splat word: same scheme over 4 lanes (index mirrored as 3 - n).
1897 (define_expand "altivec_vspltw"
1898 [(use (match_operand:V4SI 0 "register_operand" ""))
1899 (use (match_operand:V4SI 1 "register_operand" ""))
1900 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1906 /* Special handling for LE with -maltivec=be. We have to reflect
1907 the actual selected index for the splat in the RTL. */
1908 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1909 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1911 v = gen_rtvec (1, operands[2]);
1912 x = gen_rtx_VEC_SELECT (SImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1913 x = gen_rtx_VEC_DUPLICATE (V4SImode, x);
1914 emit_insn (gen_rtx_SET (operands[0], x));
;; Matching insn; mirrors the index for true LE (vspltw is BE-biased).
1918 (define_insn "*altivec_vspltw_internal"
1919 [(set (match_operand:V4SI 0 "register_operand" "=v")
1921 (vec_select:SI (match_operand:V4SI 1 "register_operand" "v")
1923 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
1926 /* For true LE, this adjusts the selected index. For LE with
1927 -maltivec=be, this reverses what was done in the define_expand
1928 because the instruction already has big-endian bias. */
1929 if (!BYTES_BIG_ENDIAN)
1930 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1932 return "vspltw %0,%1,%2";
1934 [(set_attr "type" "vecperm")])
;; Opaque form: no endian adjustment of the index.
1936 (define_insn "altivec_vspltw_direct"
1937 [(set (match_operand:V4SI 0 "register_operand" "=v")
1938 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1939 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1940 UNSPEC_VSPLT_DIRECT))]
1943 [(set_attr "type" "vecperm")])
;; Splat float element: identical index handling to vspltw; the insn
;; still emits the integer vspltw since it is a pure lane copy.
1945 (define_expand "altivec_vspltsf"
1946 [(use (match_operand:V4SF 0 "register_operand" ""))
1947 (use (match_operand:V4SF 1 "register_operand" ""))
1948 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1954 /* Special handling for LE with -maltivec=be. We have to reflect
1955 the actual selected index for the splat in the RTL. */
1956 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1957 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1959 v = gen_rtvec (1, operands[2]);
1960 x = gen_rtx_VEC_SELECT (SFmode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1961 x = gen_rtx_VEC_DUPLICATE (V4SFmode, x);
1962 emit_insn (gen_rtx_SET (operands[0], x));
;; Matching insn for the float splat; mirrors index on true LE.
1966 (define_insn "*altivec_vspltsf_internal"
1967 [(set (match_operand:V4SF 0 "register_operand" "=v")
1969 (vec_select:SF (match_operand:V4SF 1 "register_operand" "v")
1971 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
1972 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1974 /* For true LE, this adjusts the selected index. For LE with
1975 -maltivec=be, this reverses what was done in the define_expand
1976 because the instruction already has big-endian bias. */
1977 if (!BYTES_BIG_ENDIAN)
1978 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1980 return "vspltw %0,%1,%2";
1982 [(set_attr "type" "vecperm")])
;; Splat a 5-bit signed immediate into every element (vspltisb/h/w).
1984 (define_insn "altivec_vspltis<VI_char>"
1985 [(set (match_operand:VI 0 "register_operand" "=v")
1987 (match_operand:QI 1 "s5bit_cint_operand" "i")))]
1989 "vspltis<VI_char> %0,%1"
1990 [(set_attr "type" "vecperm")])
;; Round to integer toward zero (vrfiz), expressed as a fix of V4SF.
1992 (define_insn "*altivec_vrfiz"
1993 [(set (match_operand:V4SF 0 "register_operand" "=v")
1994 (fix:V4SF (match_operand:V4SF 1 "register_operand" "v")))]
1995 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1997 [(set_attr "type" "vecfloat")])
;; vperm expander: in LE element order the permutation must be rewritten
;; (altivec_expand_vec_perm_le) before falling through to the insn.
1999 (define_expand "altivec_vperm_<mode>"
2000 [(set (match_operand:VM 0 "register_operand" "")
2001 (unspec:VM [(match_operand:VM 1 "register_operand" "")
2002 (match_operand:VM 2 "register_operand" "")
2003 (match_operand:V16QI 3 "register_operand" "")]
2007 if (!VECTOR_ELT_ORDER_BIG)
2009 altivec_expand_vec_perm_le (operands);
2014 ;; Slightly prefer vperm, since the target does not overlap the source
;; Two-alternative insn: AltiVec vperm, or the VSX form with operand 2
;; tied to the output ("0" constraint in the second alternative).
2015 (define_insn "*altivec_vperm_<mode>_internal"
2016 [(set (match_operand:VM 0 "register_operand" "=v,?wo")
2017 (unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
2018 (match_operand:VM 2 "register_operand" "v,0")
2019 (match_operand:V16QI 3 "register_operand" "v,wo")]
2025 [(set_attr "type" "vecperm")
2026 (set_attr "length" "4")])
;; Mixed-mode vperm: V8HI inputs selected into a V16QI result.
2028 (define_insn "altivec_vperm_v8hiv16qi"
2029 [(set (match_operand:V16QI 0 "register_operand" "=v,?wo")
2030 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "v,wo")
2031 (match_operand:V8HI 2 "register_operand" "v,0")
2032 (match_operand:V16QI 3 "register_operand" "v,wo")]
2038 [(set_attr "type" "vecperm")
2039 (set_attr "length" "4")])
;; Unsigned-element variant of the vperm expander; same LE rewrite.
2041 (define_expand "altivec_vperm_<mode>_uns"
2042 [(set (match_operand:VM 0 "register_operand" "")
2043 (unspec:VM [(match_operand:VM 1 "register_operand" "")
2044 (match_operand:VM 2 "register_operand" "")
2045 (match_operand:V16QI 3 "register_operand" "")]
2049 if (!VECTOR_ELT_ORDER_BIG)
2051 altivec_expand_vec_perm_le (operands);
;; Unsigned-element vperm insn, same alternatives as the signed form.
2056 (define_insn "*altivec_vperm_<mode>_uns_internal"
2057 [(set (match_operand:VM 0 "register_operand" "=v,?wo")
2058 (unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
2059 (match_operand:VM 2 "register_operand" "v,0")
2060 (match_operand:V16QI 3 "register_operand" "v,wo")]
2066 [(set_attr "type" "vecperm")
2067 (set_attr "length" "4")])
;; Generic vec_perm optab entry point; keyed on true endianness.
2069 (define_expand "vec_permv16qi"
2070 [(set (match_operand:V16QI 0 "register_operand" "")
2071 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "")
2072 (match_operand:V16QI 2 "register_operand" "")
2073 (match_operand:V16QI 3 "register_operand" "")]
2077 if (!BYTES_BIG_ENDIAN) {
2078 altivec_expand_vec_perm_le (operands);
;; Constant-selector permute: succeeds only if
;; altivec_expand_vec_perm_const can emit a specialized sequence.
2083 (define_expand "vec_perm_constv16qi"
2084 [(match_operand:V16QI 0 "register_operand" "")
2085 (match_operand:V16QI 1 "register_operand" "")
2086 (match_operand:V16QI 2 "register_operand" "")
2087 (match_operand:V16QI 3 "" "")]
2090 if (altivec_expand_vec_perm_const (operands))
;; ISA 3.0 reversed permute (vpermr/xxpermr alternatives).
2096 (define_insn "*altivec_vpermr_<mode>_internal"
2097 [(set (match_operand:VM 0 "register_operand" "=v,?wo")
2098 (unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
2099 (match_operand:VM 2 "register_operand" "v,0")
2100 (match_operand:V16QI 3 "register_operand" "v,wo")]
2105 xxpermr %x0,%x1,%x3"
2106 [(set_attr "type" "vecperm")
2107 (set_attr "length" "4")])
;; Round toward +infinity (ceil).
2109 (define_insn "altivec_vrfip" ; ceil
2110 [(set (match_operand:V4SF 0 "register_operand" "=v")
2111 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2115 [(set_attr "type" "vecfloat")])
;; Round to nearest.
2117 (define_insn "altivec_vrfin"
2118 [(set (match_operand:V4SF 0 "register_operand" "=v")
2119 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2123 [(set_attr "type" "vecfloat")])
;; Round toward -infinity (floor).
2125 (define_insn "*altivec_vrfim" ; floor
2126 [(set (match_operand:V4SF 0 "register_operand" "=v")
2127 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2131 [(set_attr "type" "vecfloat")])
;; Convert unsigned int to float with scale by 2**(-op2).
2133 (define_insn "altivec_vcfux"
2134 [(set (match_operand:V4SF 0 "register_operand" "=v")
2135 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2136 (match_operand:QI 2 "immediate_operand" "i")]
2140 [(set_attr "type" "vecfloat")])
;; Convert signed int to float with scale by 2**(-op2).
2142 (define_insn "altivec_vcfsx"
2143 [(set (match_operand:V4SF 0 "register_operand" "=v")
2144 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2145 (match_operand:QI 2 "immediate_operand" "i")]
2149 [(set_attr "type" "vecfloat")])
;; Convert float to unsigned int with saturation; sets VSCR.
2151 (define_insn "altivec_vctuxs"
2152 [(set (match_operand:V4SI 0 "register_operand" "=v")
2153 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2154 (match_operand:QI 2 "immediate_operand" "i")]
2156 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2159 [(set_attr "type" "vecfloat")])
;; Convert float to signed int with saturation; sets VSCR.
2161 (define_insn "altivec_vctsxs"
2162 [(set (match_operand:V4SI 0 "register_operand" "=v")
2163 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2164 (match_operand:QI 2 "immediate_operand" "i")]
2166 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2169 [(set_attr "type" "vecfloat")])
;; Estimate log2.
2171 (define_insn "altivec_vlogefp"
2172 [(set (match_operand:V4SF 0 "register_operand" "=v")
2173 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2177 [(set_attr "type" "vecfloat")])
;; Estimate 2**x.
2179 (define_insn "altivec_vexptefp"
2180 [(set (match_operand:V4SF 0 "register_operand" "=v")
2181 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2185 [(set_attr "type" "vecfloat")])
;; Reciprocal square-root estimate.
2187 (define_insn "*altivec_vrsqrtefp"
2188 [(set (match_operand:V4SF 0 "register_operand" "=v")
2189 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2191 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2193 [(set_attr "type" "vecfloat")])
;; Reciprocal estimate.
2195 (define_insn "altivec_vrefp"
2196 [(set (match_operand:V4SF 0 "register_operand" "=v")
2197 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2199 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2201 [(set_attr "type" "vecfloat")])
;; copysign for V4SF: build a vector of sign-bit masks (1 << 31 per
;; word), then select sign bits from op2 and the rest from op1 via
;; vector_select.
2203 (define_expand "altivec_copysign_v4sf3"
2204 [(use (match_operand:V4SF 0 "register_operand" ""))
2205 (use (match_operand:V4SF 1 "register_operand" ""))
2206 (use (match_operand:V4SF 2 "register_operand" ""))]
2207 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2210 rtx mask = gen_reg_rtx (V4SImode);
2211 rtvec v = rtvec_alloc (4);
2212 unsigned HOST_WIDE_INT mask_val = ((unsigned HOST_WIDE_INT)1) << 31;
2214 RTVEC_ELT (v, 0) = GEN_INT (mask_val);
2215 RTVEC_ELT (v, 1) = GEN_INT (mask_val);
2216 RTVEC_ELT (v, 2) = GEN_INT (mask_val);
2217 RTVEC_ELT (v, 3) = GEN_INT (mask_val);
2219 emit_insn (gen_vec_initv4sisi (mask, gen_rtx_PARALLEL (V4SImode, v)));
2220 emit_insn (gen_vector_select_v4sf (operands[0], operands[1], operands[2],
2221 gen_lowpart (V4SFmode, mask)));
;; Shift-left-double by octet immediate: concatenates ops 1 and 2 and
;; extracts 16 bytes starting at byte offset op3.
2225 (define_insn "altivec_vsldoi_<mode>"
2226 [(set (match_operand:VM 0 "register_operand" "=v")
2227 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
2228 (match_operand:VM 2 "register_operand" "v")
2229 (match_operand:QI 3 "immediate_operand" "i")]
2232 "vsldoi %0,%1,%2,%3"
2233 [(set_attr "type" "vecperm")])
;; Sign-extend high half; in LE element order "high" maps to the
;; hardware's low-unpack instruction.
2235 (define_insn "altivec_vupkhs<VU_char>"
2236 [(set (match_operand:VP 0 "register_operand" "=v")
2237 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2238 UNSPEC_VUNPACK_HI_SIGN))]
2241 if (VECTOR_ELT_ORDER_BIG)
2242 return "vupkhs<VU_char> %0,%1";
2244 return "vupkls<VU_char> %0,%1";
2246 [(set_attr "type" "vecperm")])
;; Opaque form: always the high-unpack instruction.
2248 (define_insn "*altivec_vupkhs<VU_char>_direct"
2249 [(set (match_operand:VP 0 "register_operand" "=v")
2250 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2251 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2253 "vupkhs<VU_char> %0,%1"
2254 [(set_attr "type" "vecperm")])
;; Sign-extend low half; LE element order maps to the high-unpack insn.
2256 (define_insn "altivec_vupkls<VU_char>"
2257 [(set (match_operand:VP 0 "register_operand" "=v")
2258 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2259 UNSPEC_VUNPACK_LO_SIGN))]
2262 if (VECTOR_ELT_ORDER_BIG)
2263 return "vupkls<VU_char> %0,%1";
2265 return "vupkhs<VU_char> %0,%1";
2267 [(set_attr "type" "vecperm")])
;; Opaque form: always the low-unpack instruction.
2269 (define_insn "*altivec_vupkls<VU_char>_direct"
2270 [(set (match_operand:VP 0 "register_operand" "=v")
2271 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2272 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
2274 "vupkls<VU_char> %0,%1"
2275 [(set_attr "type" "vecperm")])
;; Pixel unpack high; same endian-driven high/low swap.
2277 (define_insn "altivec_vupkhpx"
2278 [(set (match_operand:V4SI 0 "register_operand" "=v")
2279 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2283 if (VECTOR_ELT_ORDER_BIG)
2284 return "vupkhpx %0,%1";
2286 return "vupklpx %0,%1";
2288 [(set_attr "type" "vecperm")])
;; Pixel unpack low; mirror of vupkhpx.
2290 (define_insn "altivec_vupklpx"
2291 [(set (match_operand:V4SI 0 "register_operand" "=v")
2292 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2296 if (VECTOR_ELT_ORDER_BIG)
2297 return "vupklpx %0,%1";
2299 return "vupkhpx %0,%1";
2301 [(set_attr "type" "vecperm")])
2303 ;; Compare vectors producing a vector result and a predicate, setting CR6 to
2304 ;; indicate a combined status
;; Each "record form" compare (trailing dot in the mnemonic) writes both
;; the per-element comparison result into operand 0 and a summary into
;; condition register field CR6.
;; Integer equality, all VI2 element widths.
2305 (define_insn "*altivec_vcmpequ<VI_char>_p"
2306 [(set (reg:CC CR6_REGNO)
2307 (unspec:CC [(eq:CC (match_operand:VI2 1 "register_operand" "v")
2308 (match_operand:VI2 2 "register_operand" "v"))]
2310 (set (match_operand:VI2 0 "register_operand" "=v")
2311 (eq:VI2 (match_dup 1)
2314 "vcmpequ<VI_char>. %0,%1,%2"
2315 [(set_attr "type" "veccmpfx")])
;; Signed integer greater-than.
2317 (define_insn "*altivec_vcmpgts<VI_char>_p"
2318 [(set (reg:CC CR6_REGNO)
2319 (unspec:CC [(gt:CC (match_operand:VI2 1 "register_operand" "v")
2320 (match_operand:VI2 2 "register_operand" "v"))]
2322 (set (match_operand:VI2 0 "register_operand" "=v")
2323 (gt:VI2 (match_dup 1)
2326 "vcmpgts<VI_char>. %0,%1,%2"
2327 [(set_attr "type" "veccmpfx")])
;; Unsigned integer greater-than.
2329 (define_insn "*altivec_vcmpgtu<VI_char>_p"
2330 [(set (reg:CC CR6_REGNO)
2331 (unspec:CC [(gtu:CC (match_operand:VI2 1 "register_operand" "v")
2332 (match_operand:VI2 2 "register_operand" "v"))]
2334 (set (match_operand:VI2 0 "register_operand" "=v")
2335 (gtu:VI2 (match_dup 1)
2338 "vcmpgtu<VI_char>. %0,%1,%2"
2339 [(set_attr "type" "veccmpfx")])
;; Floating-point equality (V4SF), AltiVec only.
2341 (define_insn "*altivec_vcmpeqfp_p"
2342 [(set (reg:CC CR6_REGNO)
2343 (unspec:CC [(eq:CC (match_operand:V4SF 1 "register_operand" "v")
2344 (match_operand:V4SF 2 "register_operand" "v"))]
2346 (set (match_operand:V4SF 0 "register_operand" "=v")
2347 (eq:V4SF (match_dup 1)
2349 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2350 "vcmpeqfp. %0,%1,%2"
2351 [(set_attr "type" "veccmp")])
;; Floating-point greater-than.
2353 (define_insn "*altivec_vcmpgtfp_p"
2354 [(set (reg:CC CR6_REGNO)
2355 (unspec:CC [(gt:CC (match_operand:V4SF 1 "register_operand" "v")
2356 (match_operand:V4SF 2 "register_operand" "v"))]
2358 (set (match_operand:V4SF 0 "register_operand" "=v")
2359 (gt:V4SF (match_dup 1)
2361 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2362 "vcmpgtfp. %0,%1,%2"
2363 [(set_attr "type" "veccmp")])
;; Floating-point greater-or-equal.
2365 (define_insn "*altivec_vcmpgefp_p"
2366 [(set (reg:CC CR6_REGNO)
2367 (unspec:CC [(ge:CC (match_operand:V4SF 1 "register_operand" "v")
2368 (match_operand:V4SF 2 "register_operand" "v"))]
2370 (set (match_operand:V4SF 0 "register_operand" "=v")
2371 (ge:V4SF (match_dup 1)
2373 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2374 "vcmpgefp. %0,%1,%2"
2375 [(set_attr "type" "veccmp")])
;; Bounds compare (vcmpbfp.): fully unspec'd since its result encoding
;; has no RTL equivalent; allowed under AltiVec or VSX.
2377 (define_insn "altivec_vcmpbfp_p"
2378 [(set (reg:CC CR6_REGNO)
2379 (unspec:CC [(match_operand:V4SF 1 "register_operand" "v")
2380 (match_operand:V4SF 2 "register_operand" "v")]
2382 (set (match_operand:V4SF 0 "register_operand" "=v")
2383 (unspec:V4SF [(match_dup 1)
2386 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)"
2388 [(set_attr "type" "veccmp")])
;; Move-to/move-from VSCR (vector status and control register), plus the
;; cache data-stream control instructions.  The VSCR and stream patterns
;; use unspec_volatile: they have side effects that must not be
;; reordered or deleted.
2390 (define_insn "altivec_mtvscr"
2391 [(set (reg:SI VSCR_REGNO)
2393 [(match_operand:V4SI 0 "register_operand" "v")] UNSPECV_MTVSCR))]
2396 [(set_attr "type" "vecsimple")])
;; Read VSCR into a vector register.
2398 (define_insn "altivec_mfvscr"
2399 [(set (match_operand:V8HI 0 "register_operand" "=v")
2400 (unspec_volatile:V8HI [(reg:SI VSCR_REGNO)] UNSPECV_MFVSCR))]
2403 [(set_attr "type" "vecsimple")])
;; Stop all data streams.
2405 (define_insn "altivec_dssall"
2406 [(unspec_volatile [(const_int 0)] UNSPECV_DSSALL)]
2409 [(set_attr "type" "vecsimple")])
;; Stop the data stream selected by the immediate tag.
2411 (define_insn "altivec_dss"
2412 [(unspec_volatile [(match_operand:QI 0 "immediate_operand" "i")]
2416 [(set_attr "type" "vecsimple")])
;; Data-stream touch hints: operand 0 is the (Pmode) base address,
;; operand 1 the stream control word, operand 2 the stream tag.
2418 (define_insn "altivec_dst"
2419 [(unspec [(match_operand 0 "register_operand" "b")
2420 (match_operand:SI 1 "register_operand" "r")
2421 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DST)]
2422 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2424 [(set_attr "type" "vecsimple")])
;; Transient variant of dst.
2426 (define_insn "altivec_dstt"
2427 [(unspec [(match_operand 0 "register_operand" "b")
2428 (match_operand:SI 1 "register_operand" "r")
2429 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTT)]
2430 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2432 [(set_attr "type" "vecsimple")])
;; Store-intent variant of dst.
2434 (define_insn "altivec_dstst"
2435 [(unspec [(match_operand 0 "register_operand" "b")
2436 (match_operand:SI 1 "register_operand" "r")
2437 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTST)]
2438 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2440 [(set_attr "type" "vecsimple")])
;; Transient store-intent variant of dst.
2442 (define_insn "altivec_dststt"
2443 [(unspec [(match_operand 0 "register_operand" "b")
2444 (match_operand:SI 1 "register_operand" "r")
2445 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTSTT)]
2446 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2448 [(set_attr "type" "vecsimple")])
;; lvsl/lvsr generate permute control vectors for realigning unaligned
;; loads.  With big element order the hardware instruction is used as
;; is; otherwise the raw result is byte-reversed by permuting it with an
;; identity-index vector so the caller sees the big-endian semantics.
2450 (define_expand "altivec_lvsl"
2451 [(use (match_operand:V16QI 0 "register_operand" ""))
2452 (use (match_operand:V16QI 1 "memory_operand" ""))]
2455 if (VECTOR_ELT_ORDER_BIG)
2456 emit_insn (gen_altivec_lvsl_direct (operands[0], operands[1]));
;; Little-endian path: fetch the raw mask, then vperm it with the
;; constant 0..15 index vector to reverse its bytes.
2460 rtx mask, perm[16], constv, vperm;
2461 mask = gen_reg_rtx (V16QImode);
2462 emit_insn (gen_altivec_lvsl_direct (mask, operands[1]));
2463 for (i = 0; i < 16; ++i)
2464 perm[i] = GEN_INT (i);
2465 constv = gen_rtx_CONST_VECTOR (V16QImode, gen_rtvec_v (16, perm));
2466 constv = force_reg (V16QImode, constv);
2467 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2469 emit_insn (gen_rtx_SET (operands[0], vperm));
;; Raw lvsl hardware instruction.
2474 (define_insn "altivec_lvsl_direct"
2475 [(set (match_operand:V16QI 0 "register_operand" "=v")
2476 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2480 [(set_attr "type" "vecload")])
;; lvsr: same structure as the lvsl expander above.
2482 (define_expand "altivec_lvsr"
2483 [(use (match_operand:V16QI 0 "register_operand" ""))
2484 (use (match_operand:V16QI 1 "memory_operand" ""))]
2487 if (VECTOR_ELT_ORDER_BIG)
2488 emit_insn (gen_altivec_lvsr_direct (operands[0], operands[1]));
2492 rtx mask, perm[16], constv, vperm;
2493 mask = gen_reg_rtx (V16QImode);
2494 emit_insn (gen_altivec_lvsr_direct (mask, operands[1]));
2495 for (i = 0; i < 16; ++i)
2496 perm[i] = GEN_INT (i);
2497 constv = gen_rtx_CONST_VECTOR (V16QImode, gen_rtvec_v (16, perm));
2498 constv = force_reg (V16QImode, constv);
2499 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2501 emit_insn (gen_rtx_SET (operands[0], vperm));
;; Raw lvsr hardware instruction.
2506 (define_insn "altivec_lvsr_direct"
2507 [(set (match_operand:V16QI 0 "register_operand" "=v")
2508 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2512 [(set_attr "type" "vecload")])
;; Build the vperm mask used by the vectorizer for misaligned loads:
;; lvsr on the *negated* address of the memory operand.
2514 (define_expand "build_vector_mask_for_load"
2515 [(set (match_operand:V16QI 0 "register_operand" "")
2516 (unspec:V16QI [(match_operand 1 "memory_operand" "")] UNSPEC_LVSR))]
2523 gcc_assert (GET_CODE (operands[1]) == MEM);
2525 addr = XEXP (operands[1], 0);
2526 temp = gen_reg_rtx (GET_MODE (addr));
2527 emit_insn (gen_rtx_SET (temp, gen_rtx_NEG (GET_MODE (addr), addr)));
2528 emit_insn (gen_altivec_lvsr (operands[0],
2529 replace_equiv_address (operands[1], temp)));
2533 ;; Parallel some of the LVE* and STV*'s with unspecs because some have
2534 ;; identical rtl but different instructions-- and gcc gets confused.
;; lve<VI_char>x: load a single element.  The expander redirects to the
;; big-endian expansion helper when running little-endian with big
;; element order; otherwise the plain pattern below matches.
2536 (define_expand "altivec_lve<VI_char>x"
2538 [(set (match_operand:VI 0 "register_operand" "=v")
2539 (match_operand:VI 1 "memory_operand" "Z"))
2540 (unspec [(const_int 0)] UNSPEC_LVE)])]
2543 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2545 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_LVE);
2550 (define_insn "*altivec_lve<VI_char>x_internal"
2552 [(set (match_operand:VI 0 "register_operand" "=v")
2553 (match_operand:VI 1 "memory_operand" "Z"))
2554 (unspec [(const_int 0)] UNSPEC_LVE)])]
2556 "lve<VI_char>x %0,%y1"
2557 [(set_attr "type" "vecload")])
;; Float variant of the element load.
2559 (define_insn "*altivec_lvesfx"
2561 [(set (match_operand:V4SF 0 "register_operand" "=v")
2562 (match_operand:V4SF 1 "memory_operand" "Z"))
2563 (unspec [(const_int 0)] UNSPEC_LVE)])]
2566 [(set_attr "type" "vecload")])
;; lvxl ("load vector indexed LRU") expander + insn; tagged with
;; UNSPEC_SET_VSCR to distinguish it from plain lvx at the RTL level.
2568 (define_expand "altivec_lvxl_<mode>"
2570 [(set (match_operand:VM2 0 "register_operand" "=v")
2571 (match_operand:VM2 1 "memory_operand" "Z"))
2572 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
2575 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2577 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_SET_VSCR);
2582 (define_insn "*altivec_lvxl_<mode>_internal"
2584 [(set (match_operand:VM2 0 "register_operand" "=v")
2585 (match_operand:VM2 1 "memory_operand" "Z"))
2586 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
2589 [(set_attr "type" "vecload")])
2591 ; This version of lvx is used only in cases where we need to force an lvx
2592 ; over any other load, and we don't care about losing CSE opportunities.
2593 ; Its primary use is for prologue register saves.
2594 (define_insn "altivec_lvx_<mode>_internal"
2596 [(set (match_operand:VM2 0 "register_operand" "=v")
2597 (match_operand:VM2 1 "memory_operand" "Z"))
2598 (unspec [(const_int 0)] UNSPEC_LVX)])]
2601 [(set_attr "type" "vecload")])
2603 ; The next two patterns embody what lvx should usually look like.
;; lvx ignores the low 4 address bits; that is modeled explicitly by
;; the (and ...) masking of the effective address.  2op = reg+reg form.
2604 (define_insn "altivec_lvx_<mode>_2op"
2605 [(set (match_operand:VM2 0 "register_operand" "=v")
2606 (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b")
2607 (match_operand:DI 2 "register_operand" "r"))
2609 "TARGET_ALTIVEC && TARGET_64BIT"
2611 [(set_attr "type" "vecload")])
;; 1op = single base register form.
2613 (define_insn "altivec_lvx_<mode>_1op"
2614 [(set (match_operand:VM2 0 "register_operand" "=v")
2615 (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r")
2617 "TARGET_ALTIVEC && TARGET_64BIT"
2619 [(set_attr "type" "vecload")])
2621 ; 32-bit versions of the above.
2622 (define_insn "altivec_lvx_<mode>_2op_si"
2623 [(set (match_operand:VM2 0 "register_operand" "=v")
2624 (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b")
2625 (match_operand:SI 2 "register_operand" "r"))
2627 "TARGET_ALTIVEC && TARGET_32BIT"
2629 [(set_attr "type" "vecload")])
2631 (define_insn "altivec_lvx_<mode>_1op_si"
2632 [(set (match_operand:VM2 0 "register_operand" "=v")
2633 (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r")
2635 "TARGET_ALTIVEC && TARGET_32BIT"
2637 [(set_attr "type" "vecload")])
2639 ; This version of stvx is used only in cases where we need to force an stvx
2640 ; over any other store, and we don't care about losing CSE opportunities.
2641 ; Its primary use is for epilogue register restores.
2642 (define_insn "altivec_stvx_<mode>_internal"
2644 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2645 (match_operand:VM2 1 "register_operand" "v"))
2646 (unspec [(const_int 0)] UNSPEC_STVX)])]
2649 [(set_attr "type" "vecstore")])
2651 ; The next two patterns embody what stvx should usually look like.
;; As with lvx, the (and ...) models stvx's truncation of the effective
;; address to a 16-byte boundary.  2op = reg+reg, 1op = single base reg.
2652 (define_insn "altivec_stvx_<mode>_2op"
2653 [(set (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b")
2654 (match_operand:DI 2 "register_operand" "r"))
2656 (match_operand:VM2 0 "register_operand" "v"))]
2657 "TARGET_ALTIVEC && TARGET_64BIT"
2659 [(set_attr "type" "vecstore")])
2661 (define_insn "altivec_stvx_<mode>_1op"
2662 [(set (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r")
2664 (match_operand:VM2 0 "register_operand" "v"))]
2665 "TARGET_ALTIVEC && TARGET_64BIT"
2667 [(set_attr "type" "vecstore")])
2669 ; 32-bit versions of the above.
2670 (define_insn "altivec_stvx_<mode>_2op_si"
2671 [(set (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b")
2672 (match_operand:SI 2 "register_operand" "r"))
2674 (match_operand:VM2 0 "register_operand" "v"))]
2675 "TARGET_ALTIVEC && TARGET_32BIT"
2677 [(set_attr "type" "vecstore")])
2679 (define_insn "altivec_stvx_<mode>_1op_si"
2680 [(set (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r")
2682 (match_operand:VM2 0 "register_operand" "v"))]
2683 "TARGET_ALTIVEC && TARGET_32BIT"
2685 [(set_attr "type" "vecstore")])
;; stvxl expander/insn, using UNSPEC_STVXL to keep it distinct from
;; plain stvx; the expander handles LE-with-big-element-order specially.
2687 (define_expand "altivec_stvxl_<mode>"
2689 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2690 (match_operand:VM2 1 "register_operand" "v"))
2691 (unspec [(const_int 0)] UNSPEC_STVXL)])]
2694 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2696 altivec_expand_stvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVXL);
2701 (define_insn "*altivec_stvxl_<mode>_internal"
2703 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2704 (match_operand:VM2 1 "register_operand" "v"))
2705 (unspec [(const_int 0)] UNSPEC_STVXL)])]
2708 [(set_attr "type" "vecstore")])
;; stve<VI_char>x: store a single element; same endian special case.
2710 (define_expand "altivec_stve<VI_char>x"
2711 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
2712 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
2715 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2717 altivec_expand_stvex_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVE);
2722 (define_insn "*altivec_stve<VI_char>x_internal"
2723 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
2724 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
2726 "stve<VI_char>x %1,%y0"
2727 [(set_attr "type" "vecstore")])
;; Float variant of the element store.
2729 (define_insn "*altivec_stvesfx"
2730 [(set (match_operand:SF 0 "memory_operand" "=Z")
2731 (unspec:SF [(match_operand:V4SF 1 "register_operand" "v")] UNSPEC_STVE))]
2734 [(set_attr "type" "vecstore")])
;; Integer abs as a three-instruction sequence:
2737 ;; xxlxor/vxor SCRATCH0,SCRATCH0,SCRATCH0
2738 ;; vsubu?m SCRATCH2,SCRATCH1,%1
2739 ;; vmaxs? %0,%1,SCRATCH2"
;; i.e. abs(x) = smax (x, 0 - x), with modular (wrapping) subtract.
2740 (define_expand "abs<mode>2"
2741 [(set (match_dup 2) (match_dup 3))
2743 (minus:VI2 (match_dup 2)
2744 (match_operand:VI2 1 "register_operand" "v")))
2745 (set (match_operand:VI2 0 "register_operand" "=v")
2746 (smax:VI2 (match_dup 1) (match_dup 4)))]
;; op2 = scratch zero, op3 = zero constant, op4 = scratch negation.
2749 operands[2] = gen_reg_rtx (<MODE>mode);
2750 operands[3] = CONST0_RTX (<MODE>mode);
2751 operands[4] = gen_reg_rtx (<MODE>mode);
;; Negative abs: nabs(x) = smin (x, 0 - x).
2755 ;; vspltisw SCRATCH1,0
2756 ;; vsubu?m SCRATCH2,SCRATCH1,%1
2757 ;; vmins? %0,%1,SCRATCH2"
2758 (define_expand "nabs<mode>2"
2759 [(set (match_dup 2) (match_dup 3))
2761 (minus:VI2 (match_dup 2)
2762 (match_operand:VI2 1 "register_operand" "v")))
2763 (set (match_operand:VI2 0 "register_operand" "=v")
2764 (smin:VI2 (match_dup 1) (match_dup 4)))]
2767 operands[2] = gen_reg_rtx (<MODE>mode);
2768 operands[3] = CONST0_RTX (<MODE>mode);
2769 operands[4] = gen_reg_rtx (<MODE>mode);
;; Float abs: build the 0x80000000 sign mask as (-1 << -1) per lane,
;; then clear the sign bit with andc.
2773 ;; vspltisw SCRATCH1,-1
2774 ;; vslw SCRATCH2,SCRATCH1,SCRATCH1
2775 ;; vandc %0,%1,SCRATCH2
2776 (define_expand "altivec_absv4sf2"
2778 (vec_duplicate:V4SI (const_int -1)))
2780 (ashift:V4SI (match_dup 2) (match_dup 2)))
2781 (set (match_operand:V4SF 0 "register_operand" "=v")
2782 (and:V4SF (not:V4SF (subreg:V4SF (match_dup 3) 0))
2783 (match_operand:V4SF 1 "register_operand" "v")))]
2786 operands[2] = gen_reg_rtx (V4SImode);
2787 operands[3] = gen_reg_rtx (V4SImode);
;; Saturating abs: the saturating subtract also records overflow in
;; VSCR, hence the parallel set of VSCR_REGNO alongside the subtract.
2791 ;; vspltis? SCRATCH0,0
2792 ;; vsubs?s SCRATCH2,SCRATCH1,%1
2793 ;; vmaxs? %0,%1,SCRATCH2"
2794 (define_expand "altivec_abss_<mode>"
2795 [(set (match_dup 2) (vec_duplicate:VI (const_int 0)))
2796 (parallel [(set (match_dup 3)
2797 (unspec:VI [(match_dup 2)
2798 (match_operand:VI 1 "register_operand" "v")]
2800 (set (reg:SI VSCR_REGNO)
2801 (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))])
2802 (set (match_operand:VI 0 "register_operand" "=v")
2803 (smax:VI (match_dup 1) (match_dup 3)))]
2806 operands[2] = gen_reg_rtx (GET_MODE (operands[0]));
2807 operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
;; Horizontal add for short int vectors: vsum4s partial sums, then
;; vsumsws into one lane, then extract that lane (position depends on
;; element order) as the scalar result.
2810 (define_expand "reduc_plus_scal_<mode>"
2811 [(set (match_operand:<VI_scalar> 0 "register_operand" "=v")
2812 (unspec:VIshort [(match_operand:VIshort 1 "register_operand" "v")]
2813 UNSPEC_REDUC_PLUS))]
2816 rtx vzero = gen_reg_rtx (V4SImode);
2817 rtx vtmp1 = gen_reg_rtx (V4SImode);
2818 rtx vtmp2 = gen_reg_rtx (<MODE>mode);
2819 rtx dest = gen_lowpart (V4SImode, vtmp2);
2820 int elt = VECTOR_ELT_ORDER_BIG ? GET_MODE_NUNITS (<MODE>mode) - 1 : 0;
2822 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2823 emit_insn (gen_altivec_vsum4s<VI_char>s (vtmp1, operands[1], vzero));
2824 emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
2825 rs6000_expand_vector_extract (operands[0], vtmp2, GEN_INT (elt));
;; ISA 3.0 (Power9) native vector negate instruction.
2829 (define_insn "*p9_neg<mode>2"
2830 [(set (match_operand:VNEG 0 "altivec_register_operand" "=v")
2831 (neg:VNEG (match_operand:VNEG 1 "altivec_register_operand" "v")))]
2833 "vneg<VI_char> %0,%1"
2834 [(set_attr "type" "vecsimple")])
;; Generic vector negate: on pre-P9 (or modes without vneg) fall back to
;; 0 - x using a zeroed scratch register; otherwise the insn above
;; matches directly.
2836 (define_expand "neg<mode>2"
2837 [(set (match_operand:VI2 0 "register_operand" "")
2838 (neg:VI2 (match_operand:VI2 1 "register_operand" "")))]
2841 if (!TARGET_P9_VECTOR || (<MODE>mode != V4SImode && <MODE>mode != V2DImode))
2845 vzero = gen_reg_rtx (GET_MODE (operands[0]));
2846 emit_move_insn (vzero, CONST0_RTX (<MODE>mode));
2847 emit_insn (gen_sub<mode>3 (operands[0], vzero, operands[1]));
;; Unsigned dot product: multiply-sum of op1*op2 accumulated into op3,
;; implemented directly by vmsumu?m.
2852 (define_expand "udot_prod<mode>"
2853 [(set (match_operand:V4SI 0 "register_operand" "=v")
2854 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
2855 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
2856 (match_operand:VIshort 2 "register_operand" "v")]
2861 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], operands[2], operands[3]));
;; Signed dot product for V8HI via vmsumshm.
2865 (define_expand "sdot_prodv8hi"
2866 [(set (match_operand:V4SI 0 "register_operand" "=v")
2867 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
2868 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
2869 (match_operand:V8HI 2 "register_operand" "v")]
2874 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], operands[2], operands[3]));
;; Widening sums: multiply-sum against a vector of ones turns a
;; widened element-sum into a single vmsum instruction.
2878 (define_expand "widen_usum<mode>3"
2879 [(set (match_operand:V4SI 0 "register_operand" "=v")
2880 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
2881 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")]
2886 rtx vones = gen_reg_rtx (GET_MODE (operands[1]));
2888 emit_insn (gen_altivec_vspltis<VI_char> (vones, const1_rtx));
2889 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], vones, operands[2]));
;; Signed widening sum, V16QI elements.
2893 (define_expand "widen_ssumv16qi3"
2894 [(set (match_operand:V4SI 0 "register_operand" "=v")
2895 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
2896 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")]
2901 rtx vones = gen_reg_rtx (V16QImode);
2903 emit_insn (gen_altivec_vspltisb (vones, const1_rtx));
2904 emit_insn (gen_altivec_vmsummbm (operands[0], operands[1], vones, operands[2]));
;; Signed widening sum, V8HI elements.
2908 (define_expand "widen_ssumv8hi3"
2909 [(set (match_operand:V4SI 0 "register_operand" "=v")
2910 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
2911 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2916 rtx vones = gen_reg_rtx (V8HImode);
2918 emit_insn (gen_altivec_vspltish (vones, const1_rtx));
2919 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], vones, operands[2]));
;; Signed unpack standard names map straight onto the *_direct vupk
;; insns defined earlier.
2923 (define_expand "vec_unpacks_hi_<VP_small_lc>"
2924 [(set (match_operand:VP 0 "register_operand" "=v")
2925 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2926 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2930 (define_expand "vec_unpacks_lo_<VP_small_lc>"
2931 [(set (match_operand:VP 0 "register_operand" "=v")
2932 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2933 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
;; vperm helpers with mixed input/output modes, used by the unsigned
;; unpack and widening-multiply expanders below.  Second alternative
;; allows VSX registers (wo) with operand 2 tied to the output.
2937 (define_insn "vperm_v8hiv4si"
2938 [(set (match_operand:V4SI 0 "register_operand" "=v,?wo")
2939 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v,wo")
2940 (match_operand:V4SI 2 "register_operand" "v,0")
2941 (match_operand:V16QI 3 "register_operand" "v,wo")]
2947 [(set_attr "type" "vecperm")
2948 (set_attr "length" "4")])
2950 (define_insn "vperm_v16qiv8hi"
2951 [(set (match_operand:V8HI 0 "register_operand" "=v,?wo")
2952 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v,wo")
2953 (match_operand:V8HI 2 "register_operand" "v,0")
2954 (match_operand:V16QI 3 "register_operand" "v,wo")]
2960 [(set_attr "type" "vecperm")
2961 (set_attr "length" "4")])
;; Unsigned unpack has no hardware instruction; emulate by permuting
;; the source with a zero vector.  Mask indices 16+ select zero bytes,
;; indices 0-15 select source bytes; the two index sets are swapped
;; between the BE and LE table entries.
2964 (define_expand "vec_unpacku_hi_v16qi"
2965 [(set (match_operand:V8HI 0 "register_operand" "=v")
2966 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
2971 rtx vzero = gen_reg_rtx (V8HImode);
2972 rtx mask = gen_reg_rtx (V16QImode);
2973 rtvec v = rtvec_alloc (16);
2974 bool be = BYTES_BIG_ENDIAN;
2976 emit_insn (gen_altivec_vspltish (vzero, const0_rtx));
2978 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 7);
2979 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 0 : 16);
2980 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 16 : 6);
2981 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 1 : 16);
2982 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 5);
2983 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 2 : 16);
2984 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 16 : 4);
2985 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 3 : 16);
2986 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 3);
2987 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 4 : 16);
2988 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 2);
2989 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 5 : 16);
2990 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 1);
2991 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 6 : 16);
2992 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 : 0);
2993 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 7 : 16);
2995 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2996 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
;; Unsigned unpack high, V8HI -> V4SI: each word takes two zero bytes
;; (indices 16/17) and two source bytes.
3000 (define_expand "vec_unpacku_hi_v8hi"
3001 [(set (match_operand:V4SI 0 "register_operand" "=v")
3002 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
3007 rtx vzero = gen_reg_rtx (V4SImode);
3008 rtx mask = gen_reg_rtx (V16QImode);
3009 rtvec v = rtvec_alloc (16);
3010 bool be = BYTES_BIG_ENDIAN;
3012 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
3014 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 7);
3015 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 17 : 6);
3016 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 0 : 17);
3017 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 1 : 16);
3018 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 5);
3019 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 17 : 4);
3020 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 2 : 17);
3021 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 3 : 16);
3022 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 3);
3023 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 17 : 2);
3024 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 4 : 17);
3025 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 5 : 16);
3026 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 1);
3027 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 : 0);
3028 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 6 : 17);
3029 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 7 : 16);
3031 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3032 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
;; Unsigned unpack low, V16QI -> V8HI (source bytes 8-15).
3036 (define_expand "vec_unpacku_lo_v16qi"
3037 [(set (match_operand:V8HI 0 "register_operand" "=v")
3038 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
3043 rtx vzero = gen_reg_rtx (V8HImode);
3044 rtx mask = gen_reg_rtx (V16QImode);
3045 rtvec v = rtvec_alloc (16);
3046 bool be = BYTES_BIG_ENDIAN;
3048 emit_insn (gen_altivec_vspltish (vzero, const0_rtx));
3050 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
3051 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 8 : 16);
3052 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 16 : 14);
3053 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 9 : 16);
3054 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
3055 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 10 : 16);
3056 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 16 : 12);
3057 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
3058 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
3059 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 12 : 16);
3060 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 10);
3061 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
3062 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 9);
3063 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 14 : 16);
3064 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 : 8);
3065 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);
3067 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3068 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
;; Unsigned unpack low, V8HI -> V4SI (source bytes 8-15).
3072 (define_expand "vec_unpacku_lo_v8hi"
3073 [(set (match_operand:V4SI 0 "register_operand" "=v")
3074 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
3079 rtx vzero = gen_reg_rtx (V4SImode);
3080 rtx mask = gen_reg_rtx (V16QImode);
3081 rtvec v = rtvec_alloc (16);
3082 bool be = BYTES_BIG_ENDIAN;
3084 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
3086 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
3087 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 17 : 14);
3088 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 8 : 17);
3089 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 9 : 16);
3090 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
3091 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 17 : 12);
3092 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 10 : 17);
3093 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
3094 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
3095 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 17 : 10);
3096 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 12 : 17);
3097 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
3098 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 9);
3099 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 : 8);
3100 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 14 : 17);
3101 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);
3103 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3104 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
3108 (define_expand "vec_widen_umult_hi_v16qi"
3109 [(set (match_operand:V8HI 0 "register_operand" "=v")
3110 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3111 (match_operand:V16QI 2 "register_operand" "v")]
3116 rtx ve = gen_reg_rtx (V8HImode);
3117 rtx vo = gen_reg_rtx (V8HImode);
3119 if (BYTES_BIG_ENDIAN)
3121 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3122 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3123 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
3127 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3128 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3129 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
3134 (define_expand "vec_widen_umult_lo_v16qi"
3135 [(set (match_operand:V8HI 0 "register_operand" "=v")
3136 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3137 (match_operand:V16QI 2 "register_operand" "v")]
3142 rtx ve = gen_reg_rtx (V8HImode);
3143 rtx vo = gen_reg_rtx (V8HImode);
3145 if (BYTES_BIG_ENDIAN)
3147 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3148 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3149 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
3153 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3154 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3155 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
3160 (define_expand "vec_widen_smult_hi_v16qi"
3161 [(set (match_operand:V8HI 0 "register_operand" "=v")
3162 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3163 (match_operand:V16QI 2 "register_operand" "v")]
3168 rtx ve = gen_reg_rtx (V8HImode);
3169 rtx vo = gen_reg_rtx (V8HImode);
3171 if (BYTES_BIG_ENDIAN)
3173 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3174 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3175 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
3179 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3180 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3181 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
3186 (define_expand "vec_widen_smult_lo_v16qi"
3187 [(set (match_operand:V8HI 0 "register_operand" "=v")
3188 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3189 (match_operand:V16QI 2 "register_operand" "v")]
3194 rtx ve = gen_reg_rtx (V8HImode);
3195 rtx vo = gen_reg_rtx (V8HImode);
3197 if (BYTES_BIG_ENDIAN)
3199 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3200 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3201 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
3205 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3206 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3207 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
3212 (define_expand "vec_widen_umult_hi_v8hi"
3213 [(set (match_operand:V4SI 0 "register_operand" "=v")
3214 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3215 (match_operand:V8HI 2 "register_operand" "v")]
3220 rtx ve = gen_reg_rtx (V4SImode);
3221 rtx vo = gen_reg_rtx (V4SImode);
3223 if (BYTES_BIG_ENDIAN)
3225 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3226 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3227 emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
3231 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3232 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3233 emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
3238 (define_expand "vec_widen_umult_lo_v8hi"
3239 [(set (match_operand:V4SI 0 "register_operand" "=v")
3240 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3241 (match_operand:V8HI 2 "register_operand" "v")]
3246 rtx ve = gen_reg_rtx (V4SImode);
3247 rtx vo = gen_reg_rtx (V4SImode);
3249 if (BYTES_BIG_ENDIAN)
3251 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3252 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3253 emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
3257 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3258 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3259 emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
;; Widening signed multiply of the high V8HI halves into V4SI.
;; Same even/odd (vmulesh/vmulosh) + vmrghw scheme as the unsigned
;; variants, with the merge operands swapped for little endian.
3264 (define_expand "vec_widen_smult_hi_v8hi"
3265 [(set (match_operand:V4SI 0 "register_operand" "=v")
3266 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3267 (match_operand:V8HI 2 "register_operand" "v")]
3272 rtx ve = gen_reg_rtx (V4SImode);
3273 rtx vo = gen_reg_rtx (V4SImode);
3275 if (BYTES_BIG_ENDIAN)
3277 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3278 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3279 emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
3283 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3284 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3285 emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
;; Widening signed multiply of the low V8HI halves into V4SI,
;; merging the even/odd vmulesh/vmulosh products with vmrglw
;; (operands swapped for little endian).
3290 (define_expand "vec_widen_smult_lo_v8hi"
3291 [(set (match_operand:V4SI 0 "register_operand" "=v")
3292 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3293 (match_operand:V8HI 2 "register_operand" "v")]
3298 rtx ve = gen_reg_rtx (V4SImode);
3299 rtx vo = gen_reg_rtx (V4SImode);
3301 if (BYTES_BIG_ENDIAN)
3303 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3304 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3305 emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
3309 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3310 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3311 emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
;; Pack-with-modulo truncation of two VP vectors into the narrower
;; <VP_small> mode (UNSPEC_VPACK_UNS_UNS_MOD, declared in the enum
;; at the top of this file).
3316 (define_expand "vec_pack_trunc_<mode>"
3317 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
3318 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
3319 (match_operand:VP 2 "register_operand" "v")]
3320 UNSPEC_VPACK_UNS_UNS_MOD))]
;; V16QI multiply: compute even/odd signed byte products into V8HI
;; temporaries (vmulesb/vmulosb), then use vperm with a constructed
;; byte-selection mask to pick the low byte of each 16-bit product.
;; The mask constants are mirrored for little endian (31-2*i / 15-2*i)
;; versus big endian (2*i+1 / 2*i+17).
3324 (define_expand "mulv16qi3"
3325 [(set (match_operand:V16QI 0 "register_operand" "=v")
3326 (mult:V16QI (match_operand:V16QI 1 "register_operand" "v")
3327 (match_operand:V16QI 2 "register_operand" "v")))]
3331 rtx even = gen_reg_rtx (V8HImode);
3332 rtx odd = gen_reg_rtx (V8HImode);
3333 rtx mask = gen_reg_rtx (V16QImode);
3334 rtvec v = rtvec_alloc (16);
3337 for (i = 0; i < 8; ++i) {
3338 RTVEC_ELT (v, 2 * i)
3339 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 1 : 31 - 2 * i);
3340 RTVEC_ELT (v, 2 * i + 1)
3341 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 17 : 15 - 2 * i);
3344 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3345 emit_insn (gen_altivec_vmulesb (even, operands[1], operands[2]));
3346 emit_insn (gen_altivec_vmulosb (odd, operands[1], operands[2]));
3347 emit_insn (gen_altivec_vperm_v8hiv16qi (operands[0], even, odd, mask));
;; V4SF negate without an FP constant pool load: build the sign-bit
;; mask [-0.0 x4] by splatting -1 (vspltisw) and shifting each word
;; left by its own (all-ones, i.e. 31 after masking) amount, then
;; XOR it into the input to flip the sign bits.
3351 (define_expand "altivec_negv4sf2"
3352 [(use (match_operand:V4SF 0 "register_operand" ""))
3353 (use (match_operand:V4SF 1 "register_operand" ""))]
3359 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
3360 neg0 = gen_reg_rtx (V4SImode);
3361 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
3362 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
3365 emit_insn (gen_xorv4sf3 (operands[0],
3366 gen_lowpart (V4SFmode, neg0), operands[1]));
3371 ;; Vector SIMD PEM v2.06c defines LVLX, LVLXL, LVRX, LVRXL,
3372 ;; STVLX, STVLXL, STVRX, STVRXL are available only on Cell.
;; Cell-only unaligned left/right vector loads (lvlx, lvlxl, lvrx,
;; lvrxl).  Each is an unspec load from a BLK memory operand, guarded
;; by rs6000_cpu == PROCESSOR_CELL; output templates were lost to
;; extraction here but each is a single vecload-type instruction.
3373 (define_insn "altivec_lvlx"
3374 [(set (match_operand:V16QI 0 "register_operand" "=v")
3375 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3377 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3379 [(set_attr "type" "vecload")])
3381 (define_insn "altivec_lvlxl"
3382 [(set (match_operand:V16QI 0 "register_operand" "=v")
3383 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3385 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3387 [(set_attr "type" "vecload")])
3389 (define_insn "altivec_lvrx"
3390 [(set (match_operand:V16QI 0 "register_operand" "=v")
3391 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3393 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3395 [(set_attr "type" "vecload")])
3397 (define_insn "altivec_lvrxl"
3398 [(set (match_operand:V16QI 0 "register_operand" "=v")
3399 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3401 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3403 [(set_attr "type" "vecload")])
;; Cell-only unaligned left/right vector stores (stvlx, stvlxl,
;; stvrx, stvrxl).  Each pairs the V16QI store with a (const_int 0)
;; unspec marker so the pattern is distinguishable from a plain
;; store; all are vecstore-type and Cell-guarded.
3405 (define_insn "altivec_stvlx"
3407 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3408 (match_operand:V16QI 1 "register_operand" "v"))
3409 (unspec [(const_int 0)] UNSPEC_STVLX)])]
3410 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3412 [(set_attr "type" "vecstore")])
3414 (define_insn "altivec_stvlxl"
3416 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3417 (match_operand:V16QI 1 "register_operand" "v"))
3418 (unspec [(const_int 0)] UNSPEC_STVLXL)])]
3419 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3421 [(set_attr "type" "vecstore")])
3423 (define_insn "altivec_stvrx"
3425 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3426 (match_operand:V16QI 1 "register_operand" "v"))
3427 (unspec [(const_int 0)] UNSPEC_STVRX)])]
3428 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3430 [(set_attr "type" "vecstore")])
3432 (define_insn "altivec_stvrxl"
3434 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3435 (match_operand:V16QI 1 "register_operand" "v"))
3436 (unspec [(const_int 0)] UNSPEC_STVRXL)])]
3437 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3439 [(set_attr "type" "vecstore")])
;; Unpack half of a V8HI to V4SI (signed or unsigned, hi or lo half)
;; and convert to V4SF: the signed variants use vec_unpacks_* then
;; vcfsx, the unsigned variants use vec_unpacku_* then vcfux, with a
;; zero scale factor (const0_rtx) in all four cases.
3441 (define_expand "vec_unpacks_float_hi_v8hi"
3442 [(set (match_operand:V4SF 0 "register_operand" "")
3443 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3444 UNSPEC_VUPKHS_V4SF))]
3448 rtx tmp = gen_reg_rtx (V4SImode);
3450 emit_insn (gen_vec_unpacks_hi_v8hi (tmp, operands[1]));
3451 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
3455 (define_expand "vec_unpacks_float_lo_v8hi"
3456 [(set (match_operand:V4SF 0 "register_operand" "")
3457 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3458 UNSPEC_VUPKLS_V4SF))]
3462 rtx tmp = gen_reg_rtx (V4SImode);
3464 emit_insn (gen_vec_unpacks_lo_v8hi (tmp, operands[1]));
3465 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
3469 (define_expand "vec_unpacku_float_hi_v8hi"
3470 [(set (match_operand:V4SF 0 "register_operand" "")
3471 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3472 UNSPEC_VUPKHU_V4SF))]
3476 rtx tmp = gen_reg_rtx (V4SImode);
3478 emit_insn (gen_vec_unpacku_hi_v8hi (tmp, operands[1]));
3479 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
3483 (define_expand "vec_unpacku_float_lo_v8hi"
3484 [(set (match_operand:V4SF 0 "register_operand" "")
3485 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3486 UNSPEC_VUPKLU_V4SF))]
3490 rtx tmp = gen_reg_rtx (V4SImode);
3492 emit_insn (gen_vec_unpacku_lo_v8hi (tmp, operands[1]));
3493 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
3498 ;; Power8/power9 vector instructions encoded as Altivec instructions
3500 ;; Vector count leading zeros
;; ISA 2.07 vector count-leading-zeros over all VI2 integer modes;
;; matched anonymously (the "*" name) by the generic clz expander.
3501 (define_insn "*p8v_clz<mode>2"
3502 [(set (match_operand:VI2 0 "register_operand" "=v")
3503 (clz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
3506 [(set_attr "length" "4")
3507 (set_attr "type" "vecsimple")])
3509 ;; Vector absolute difference unsigned
;; Vector absolute-difference-unsigned: a named expander for the
;; builtin plus the ISA 3.0 insn emitting vabsdu[b|h|w] (the <wd>
;; mode attribute picks the width suffix).
3510 (define_expand "vadu<mode>3"
3511 [(set (match_operand:VI 0 "register_operand")
3512 (unspec:VI [(match_operand:VI 1 "register_operand")
3513 (match_operand:VI 2 "register_operand")]
3517 ;; Vector absolute difference unsigned
3518 (define_insn "*p9_vadu<mode>3"
3519 [(set (match_operand:VI 0 "register_operand" "=v")
3520 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
3521 (match_operand:VI 2 "register_operand" "v")]
3524 "vabsdu<wd> %0,%1,%2"
3525 [(set_attr "type" "vecsimple")])
3527 ;; Vector count trailing zeros
;; ISA 3.0 vector count-trailing-zeros over all VI2 integer modes.
3528 (define_insn "*p9v_ctz<mode>2"
3529 [(set (match_operand:VI2 0 "register_operand" "=v")
3530 (ctz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
3533 [(set_attr "length" "4")
3534 (set_attr "type" "vecsimple")])
3536 ;; Vector population count
;; ISA 2.07 vector population count over all VI2 integer modes.
3537 (define_insn "*p8v_popcount<mode>2"
3538 [(set (match_operand:VI2 0 "register_operand" "=v")
3539 (popcount:VI2 (match_operand:VI2 1 "register_operand" "v")))]
3542 [(set_attr "length" "4")
3543 (set_attr "type" "vecsimple")])
;; ISA 3.0 vector parity; VParity is the dedicated iterator (no
;; byte/half-word forms exist — see the comment near the top of file).
3546 (define_insn "*p9v_parity<mode>2"
3547 [(set (match_operand:VParity 0 "register_operand" "=v")
3548 (parity:VParity (match_operand:VParity 1 "register_operand" "v")))]
3551 [(set_attr "length" "4")
3552 (set_attr "type" "vecsimple")])
3554 ;; Vector Gather Bits by Bytes by Doubleword
;; Vector Gather Bits by Bytes by Doubleword (vgbbd), modeled as an
;; opaque unspec over V16QI.
3555 (define_insn "p8v_vgbbd"
3556 [(set (match_operand:V16QI 0 "register_operand" "=v")
3557 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
3561 [(set_attr "length" "4")
3562 (set_attr "type" "vecsimple")])
3565 ;; 128-bit binary integer arithmetic
3566 ;; We have a special container type (V1TImode) to allow operations using the
3567 ;; ISA 2.07 128-bit binary support to target the VMX/altivec registers without
3568 ;; having to worry about the register allocator deciding GPRs are better.
;; ISA 2.07 128-bit (V1TI) quadword arithmetic: vadduqm/vsubuqm are
;; modeled with real plus/minus RTL; vaddcuq/vsubcuq (carry/borrow
;; generation) are unspecs since RTL has no carry-out operator.
3570 (define_insn "altivec_vadduqm"
3571 [(set (match_operand:V1TI 0 "register_operand" "=v")
3572 (plus:V1TI (match_operand:V1TI 1 "register_operand" "v")
3573 (match_operand:V1TI 2 "register_operand" "v")))]
3576 [(set_attr "length" "4")
3577 (set_attr "type" "vecsimple")])
3579 (define_insn "altivec_vaddcuq"
3580 [(set (match_operand:V1TI 0 "register_operand" "=v")
3581 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3582 (match_operand:V1TI 2 "register_operand" "v")]
3586 [(set_attr "length" "4")
3587 (set_attr "type" "vecsimple")])
3589 (define_insn "altivec_vsubuqm"
3590 [(set (match_operand:V1TI 0 "register_operand" "=v")
3591 (minus:V1TI (match_operand:V1TI 1 "register_operand" "v")
3592 (match_operand:V1TI 2 "register_operand" "v")))]
3595 [(set_attr "length" "4")
3596 (set_attr "type" "vecsimple")])
3598 (define_insn "altivec_vsubcuq"
3599 [(set (match_operand:V1TI 0 "register_operand" "=v")
3600 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3601 (match_operand:V1TI 2 "register_operand" "v")]
3605 [(set_attr "length" "4")
3606 (set_attr "type" "vecsimple")])
;; Extended quadword add/subtract with carry-in: three V1TI operands
;; (the third is the incoming carry), all modeled as unspecs —
;; vaddeuqm/vaddecuq and vsubeuqm/vsubecuq.
3608 (define_insn "altivec_vaddeuqm"
3609 [(set (match_operand:V1TI 0 "register_operand" "=v")
3610 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3611 (match_operand:V1TI 2 "register_operand" "v")
3612 (match_operand:V1TI 3 "register_operand" "v")]
3615 "vaddeuqm %0,%1,%2,%3"
3616 [(set_attr "length" "4")
3617 (set_attr "type" "vecsimple")])
3619 (define_insn "altivec_vaddecuq"
3620 [(set (match_operand:V1TI 0 "register_operand" "=v")
3621 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3622 (match_operand:V1TI 2 "register_operand" "v")
3623 (match_operand:V1TI 3 "register_operand" "v")]
3626 "vaddecuq %0,%1,%2,%3"
3627 [(set_attr "length" "4")
3628 (set_attr "type" "vecsimple")])
3630 (define_insn "altivec_vsubeuqm"
3631 [(set (match_operand:V1TI 0 "register_operand" "=v")
3632 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3633 (match_operand:V1TI 2 "register_operand" "v")
3634 (match_operand:V1TI 3 "register_operand" "v")]
3637 "vsubeuqm %0,%1,%2,%3"
3638 [(set_attr "length" "4")
3639 (set_attr "type" "vecsimple")])
3641 (define_insn "altivec_vsubecuq"
3642 [(set (match_operand:V1TI 0 "register_operand" "=v")
3643 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3644 (match_operand:V1TI 2 "register_operand" "v")
3645 (match_operand:V1TI 3 "register_operand" "v")]
3648 "vsubecuq %0,%1,%2,%3"
3649 [(set_attr "length" "4")
3650 (set_attr "type" "vecsimple")])
3652 ;; We use V2DI as the output type to simplify converting the permute
3653 ;; bits into an integer
;; Vector bit-permute insns: vbpermq returns V2DI (easier integer
;; extraction of the permute bits), vbpermq2 returns V16QI for the
;; vector-API interface, and vbpermd is the doubleword variant.
3654 (define_insn "altivec_vbpermq"
3655 [(set (match_operand:V2DI 0 "register_operand" "=v")
3656 (unspec:V2DI [(match_operand:V16QI 1 "register_operand" "v")
3657 (match_operand:V16QI 2 "register_operand" "v")]
3661 [(set_attr "type" "vecperm")])
3663 ; One of the vector API interfaces requires returning vector unsigned char.
3664 (define_insn "altivec_vbpermq2"
3665 [(set (match_operand:V16QI 0 "register_operand" "=v")
3666 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
3667 (match_operand:V16QI 2 "register_operand" "v")]
3671 [(set_attr "type" "vecperm")])
3673 (define_insn "altivec_vbpermd"
3674 [(set (match_operand:V2DI 0 "register_operand" "=v")
3675 (unspec:V2DI [(match_operand:V2DI 1 "register_operand" "v")
3676 (match_operand:V16QI 2 "register_operand" "v")]
3680 [(set_attr "type" "vecsimple")])
3682 ;; Decimal Integer operations
;; BCD add/subtract machinery: an int iterator over the two unspecs,
;; an attr mapping them to the "add"/"sub" name fragments, a code
;; iterator for the condition tests, and the bcdadd./bcdsub. insn
;; itself (dot form — it always clobbers CR6, modeled as CCFP).
3683 (define_int_iterator UNSPEC_BCD_ADD_SUB [UNSPEC_BCDADD UNSPEC_BCDSUB])
3685 (define_int_attr bcd_add_sub [(UNSPEC_BCDADD "add")
3686 (UNSPEC_BCDSUB "sub")])
3688 (define_code_iterator BCD_TEST [eq lt gt unordered])
3690 (define_insn "bcd<bcd_add_sub>"
3691 [(set (match_operand:V1TI 0 "gpc_reg_operand" "=v")
3692 (unspec:V1TI [(match_operand:V1TI 1 "gpc_reg_operand" "v")
3693 (match_operand:V1TI 2 "gpc_reg_operand" "v")
3694 (match_operand:QI 3 "const_0_to_1_operand" "n")]
3695 UNSPEC_BCD_ADD_SUB))
3696 (clobber (reg:CCFP CR6_REGNO))]
3698 "bcd<bcd_add_sub>. %0,%1,%2,%3"
3699 [(set_attr "length" "4")
3700 (set_attr "type" "vecsimple")])
3702 ;; Use a floating point type (V2DFmode) for the compare to set CR6 so that we
3703 ;; can use the unordered test for BCD nans and add/subtracts that overflow. An
3704 ;; UNORDERED test on an integer type (like V1TImode) is not defined. The type
3705 ;; probably should be one that can go in the VMX (Altivec) registers, so we
3706 ;; can't use DDmode or DFmode.
;; BCD add/sub used only for its CR6 result: the compare is typed
;; V2DF (see the comment above — integer modes have no UNORDERED)
;; against a zero constant, and the V1TI result is a scratch.
3707 (define_insn "*bcd<bcd_add_sub>_test"
3708 [(set (reg:CCFP CR6_REGNO)
3710 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "v")
3711 (match_operand:V1TI 2 "register_operand" "v")
3712 (match_operand:QI 3 "const_0_to_1_operand" "i")]
3714 (match_operand:V2DF 4 "zero_constant" "j")))
3715 (clobber (match_scratch:V1TI 0 "=v"))]
3717 "bcd<bcd_add_sub>. %0,%1,%2,%3"
3718 [(set_attr "length" "4")
3719 (set_attr "type" "vecsimple")])
;; BCD add/sub where both the V1TI value and the CR6 condition are
;; live — same instruction, two parallel sets.
3721 (define_insn "*bcd<bcd_add_sub>_test2"
3722 [(set (match_operand:V1TI 0 "register_operand" "=v")
3723 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3724 (match_operand:V1TI 2 "register_operand" "v")
3725 (match_operand:QI 3 "const_0_to_1_operand" "i")]
3726 UNSPEC_BCD_ADD_SUB))
3727 (set (reg:CCFP CR6_REGNO)
3729 (unspec:V2DF [(match_dup 1)
3733 (match_operand:V2DF 4 "zero_constant" "j")))]
3735 "bcd<bcd_add_sub>. %0,%1,%2,%3"
3736 [(set_attr "length" "4")
3737 (set_attr "type" "vecsimple")])
;; ISA 3.0 "darn" random-number insns: 32-bit (SI), raw 64-bit and
;; conditioned 64-bit (DI) forms; the DI forms require TARGET_64BIT.
;; NOTE(review): the define_insn header line for the third ("darn")
;; pattern appears lost to extraction — confirm upstream.
3739 (define_insn "darn_32"
3740 [(set (match_operand:SI 0 "register_operand" "=r")
3741 (unspec:SI [(const_int 0)] UNSPEC_DARN_32))]
3744 [(set_attr "type" "integer")])
3746 (define_insn "darn_raw"
3747 [(set (match_operand:DI 0 "register_operand" "=r")
3748 (unspec:DI [(const_int 0)] UNSPEC_DARN_RAW))]
3749 "TARGET_P9_MISC && TARGET_64BIT"
3751 [(set_attr "type" "integer")])
3754 [(set (match_operand:DI 0 "register_operand" "=r")
3755 (unspec:DI [(const_int 0)] UNSPEC_DARN))]
3756 "TARGET_P9_MISC && TARGET_64BIT"
3758 [(set_attr "type" "integer")])
3760 ;; Test byte within range.
3762 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
3763 ;; represents a byte whose value is ignored in this context and
3764 ;; vv, the least significant byte, holds the byte value that is to
3765 ;; be tested for membership within the range specified by operand 2.
3766 ;; The bytes of operand 2 are organized as xx:xx:hi:lo.
3768 ;; Return in target register operand 0 a value of 1 if lo <= vv and
3769 ;; vv <= hi. Otherwise, set register operand 0 to 0.
3771 ;; Though the instructions to which this expansion maps operate on
3772 ;; 64-bit registers, the current implementation only operates on
3773 ;; SI-mode operands as the high-order bits provide no information
3774 ;; that is not already available in the low-order bits. To avoid the
3775 ;; costs of data widening operations, future enhancements might allow
3776 ;; DI mode for operand 0 and/or might allow operand 1 to be QI mode.
;; cmprb builtin expander: run the range compare into a fresh CC
;; pseudo (operand 3), then map the CR bits to an SI 0/1 result via
;; nested if_then_else on the lt/gt tests.
3777 (define_expand "cmprb"
3779 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
3780 (match_operand:SI 2 "gpc_reg_operand" "r")]
3782 (set (match_operand:SI 0 "gpc_reg_operand" "=r")
3783 (if_then_else:SI (lt (match_dup 3)
3786 (if_then_else (gt (match_dup 3)
3792 operands[3] = gen_reg_rtx (CCmode);
3795 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
3796 ;; represents a byte whose value is ignored in this context and
3797 ;; vv, the least significant byte, holds the byte value that is to
3798 ;; be tested for membership within the range specified by operand 2.
3799 ;; The bytes of operand 2 are organized as xx:xx:hi:lo.
3801 ;; Set bit 1 (the GT bit, 0x4) of CR register operand 0 to 1 if
3802 ;; lo <= vv and vv <= hi. Otherwise, set the GT bit to 0. The other
3803 ;; 3 bits of the target CR register are all set to 0.
;; The actual cmprb compare insn: sets a CR field from the two SI
;; operands (byte-in-range test described in the comment above).
3804 (define_insn "*cmprb_internal"
3805 [(set (match_operand:CC 0 "cc_reg_operand" "=y")
3806 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
3807 (match_operand:SI 2 "gpc_reg_operand" "r")]
3811 [(set_attr "type" "logical")])
3813 ;; Set operand 0 register to -1 if the LT bit (0x8) of condition
3814 ;; register operand 1 is on. Otherwise, set operand 0 register to 1
3815 ;; if the GT bit (0x4) of condition register operand 1 is on.
3816 ;; Otherwise, set operand 0 to 0. Note that the result stored into
3817 ;; register operand 0 is non-zero iff either the LT or GT bits are on
3818 ;; within condition register operand 1.
;; setb: materialize -1/1/0 in a GPR from the LT/GT bits of a CR
;; field — signed (CC) and unsigned (CCUNS) compare variants.
3819 (define_insn "setb_signed"
3820 [(set (match_operand:SI 0 "gpc_reg_operand" "=r")
3821 (if_then_else:SI (lt (match_operand:CC 1 "cc_reg_operand" "y")
3824 (if_then_else (gt (match_dup 1)
3830 [(set_attr "type" "logical")])
3832 (define_insn "setb_unsigned"
3833 [(set (match_operand:SI 0 "gpc_reg_operand" "=r")
3834 (if_then_else:SI (ltu (match_operand:CCUNS 1 "cc_reg_operand" "y")
3837 (if_then_else (gtu (match_dup 1)
3843 [(set_attr "type" "logical")])
3845 ;; Test byte within two ranges.
3847 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
3848 ;; represents a byte whose value is ignored in this context and
3849 ;; vv, the least significant byte, holds the byte value that is to
3850 ;; be tested for membership within the range specified by operand 2.
3851 ;; The bytes of operand 2 are organized as hi_1:lo_1:hi_2:lo_2.
3853 ;; Return in target register operand 0 a value of 1 if (lo_1 <= vv and
3854 ;; vv <= hi_1) or if (lo_2 <= vv and vv <= hi_2). Otherwise, set register
3857 ;; Though the instructions to which this expansion maps operate on
3858 ;; 64-bit registers, the current implementation only operates on
3859 ;; SI-mode operands as the high-order bits provide no information
3860 ;; that is not already available in the low-order bits. To avoid the
3861 ;; costs of data widening operations, future enhancements might allow
3862 ;; DI mode for operand 0 and/or might allow operand 1 to be QI mode.
;; cmprb2 builtin expander (two-range byte test, see comment above);
;; same CC-pseudo + if_then_else shape as the cmprb expander.
3863 (define_expand "cmprb2"
3865 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
3866 (match_operand:SI 2 "gpc_reg_operand" "r")]
3868 (set (match_operand:SI 0 "gpc_reg_operand" "=r")
3869 (if_then_else:SI (lt (match_dup 3)
3872 (if_then_else (gt (match_dup 3)
3878 operands[3] = gen_reg_rtx (CCmode);
3881 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
3882 ;; represents a byte whose value is ignored in this context and
3883 ;; vv, the least significant byte, holds the byte value that is to
3884 ;; be tested for membership within the ranges specified by operand 2.
3885 ;; The bytes of operand 2 are organized as hi_1:lo_1:hi_2:lo_2.
3887 ;; Set bit 1 (the GT bit, 0x4) of CR register operand 0 to 1 if
3888 ;; (lo_1 <= vv and vv <= hi_1) or if (lo_2 <= vv and vv <= hi_2).
3889 ;; Otherwise, set the GT bit to 0. The other 3 bits of the target
3890 ;; CR register are all set to 0.
;; The actual cmprb2 (dual-range) compare insn setting a CR field.
3891 (define_insn "*cmprb2_internal"
3892 [(set (match_operand:CC 0 "cc_reg_operand" "=y")
3893 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
3894 (match_operand:SI 2 "gpc_reg_operand" "r")]
3898 [(set_attr "type" "logical")])
3900 ;; Test byte membership within set of 8 bytes.
3902 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
3903 ;; represents a byte whose value is ignored in this context and
3904 ;; vv, the least significant byte, holds the byte value that is to
3905 ;; be tested for membership within the set specified by operand 2.
3906 ;; The bytes of operand 2 are organized as e0:e1:e2:e3:e4:e5:e6:e7.
3908 ;; Return in target register operand 0 a value of 1 if vv equals one
3909 ;; of the values e0, e1, e2, e3, e4, e5, e6, or e7. Otherwise, set
3910 ;; register operand 0 to 0. Note that the 8 byte values held within
3911 ;; operand 2 need not be unique.
3913 ;; Though the instructions to which this expansion maps operate on
3914 ;; 64-bit registers, the current implementation requires that operands
3915 ;; 0 and 1 have mode SI as the high-order bits provide no information
3916 ;; that is not already available in the low-order bits. To avoid the
3917 ;; costs of data widening operations, future enhancements might allow
3918 ;; DI mode for operand 0 and/or might allow operand 1 to be QI mode.
;; cmpeqb builtin expander (byte-in-set-of-8 test, see comment
;; above); note operand 2 is DI here (the 8 candidate bytes).
;; 64-bit only (TARGET_P9_MISC && TARGET_64BIT).
3919 (define_expand "cmpeqb"
3921 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
3922 (match_operand:DI 2 "gpc_reg_operand" "r")]
3924 (set (match_operand:SI 0 "gpc_reg_operand" "=r")
3925 (if_then_else:SI (lt (match_dup 3)
3928 (if_then_else (gt (match_dup 3)
3932 "TARGET_P9_MISC && TARGET_64BIT"
3934 operands[3] = gen_reg_rtx (CCmode);
3937 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
3938 ;; represents a byte whose value is ignored in this context and
3939 ;; vv, the least significant byte, holds the byte value that is to
3940 ;; be tested for membership within the set specified by operand 2.
3941 ;; The bytes of operand 2 are organized as e0:e1:e2:e3:e4:e5:e6:e7.
3943 ;; Set bit 1 (the GT bit, 0x4) of CR register operand 0 to 1 if vv
3944 ;; equals one of the values e0, e1, e2, e3, e4, e5, e6, or e7. Otherwise,
3945 ;; set the GT bit to zero. The other 3 bits of the target CR register
3946 ;; are all set to 0.
;; The actual cmpeqb compare insn setting a CR field from the SI
;; value byte and the DI set of eight candidate bytes.
3947 (define_insn "*cmpeqb_internal"
3948 [(set (match_operand:CC 0 "cc_reg_operand" "=y")
3949 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
3950 (match_operand:DI 2 "gpc_reg_operand" "r")]
3952 "TARGET_P9_MISC && TARGET_64BIT"
3954 [(set_attr "type" "logical")])
;; Expander for the bcdadd/bcdsub condition builtins: emit the
;; CR6-setting test form (V2DF compare against zero, operand 4,
;; filled in as CONST0_RTX below) and then extract the requested
;; BCD_TEST condition (eq/lt/gt/unordered) into an SI result.
3956 (define_expand "bcd<bcd_add_sub>_<code>"
3957 [(parallel [(set (reg:CCFP CR6_REGNO)
3959 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "")
3960 (match_operand:V1TI 2 "register_operand" "")
3961 (match_operand:QI 3 "const_0_to_1_operand" "")]
3964 (clobber (match_scratch:V1TI 5 ""))])
3965 (set (match_operand:SI 0 "register_operand" "")
3966 (BCD_TEST:SI (reg:CCFP CR6_REGNO)
3970 operands[4] = CONST0_RTX (V2DFmode);
3973 ;; Peephole2 pattern to combine a bcdadd/bcdsub that calculates the value and
3974 ;; the bcdadd/bcdsub that tests the value. The combiner won't work since
3975 ;; CR6 is a hard coded register. Unfortunately, all of the Altivec predicate
3976 ;; support is hard coded to use the fixed register CR6 instead of creating
3977 ;; a register class for CR6.
3980 [(parallel [(set (match_operand:V1TI 0 "register_operand" "")
3981 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "")
3982 (match_operand:V1TI 2 "register_operand" "")
3983 (match_operand:QI 3 "const_0_to_1_operand" "")]
3984 UNSPEC_BCD_ADD_SUB))
3985 (clobber (reg:CCFP CR6_REGNO))])
3986 (parallel [(set (reg:CCFP CR6_REGNO)
3988 (unspec:V2DF [(match_dup 1)
3992 (match_operand:V2DF 4 "zero_constant" "")))
3993 (clobber (match_operand:V1TI 5 "register_operand" ""))])]
3995 [(parallel [(set (match_dup 0)
3996 (unspec:V1TI [(match_dup 1)
3999 UNSPEC_BCD_ADD_SUB))
4000 (set (reg:CCFP CR6_REGNO)
4002 (unspec:V2DF [(match_dup 1)