2 ;; Copyright (C) 2002-2016 Free Software Foundation, Inc.
3 ;; Contributed by Aldy Hernandez (aldy@quesejoda.com)
5 ;; This file is part of GCC.
7 ;; GCC is free software; you can redistribute it and/or modify it
8 ;; under the terms of the GNU General Public License as published
9 ;; by the Free Software Foundation; either version 3, or (at your
10 ;; option) any later version.
12 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
13 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 ;; License for more details.
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
21 (define_c_enum "unspec"
44 UNSPEC_VPACK_SIGN_SIGN_SAT
45 UNSPEC_VPACK_SIGN_UNS_SAT
46 UNSPEC_VPACK_UNS_UNS_SAT
47 UNSPEC_VPACK_UNS_UNS_MOD
48 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT
71 UNSPEC_VUNPACK_HI_SIGN
72 UNSPEC_VUNPACK_LO_SIGN
73 UNSPEC_VUNPACK_HI_SIGN_DIRECT
74 UNSPEC_VUNPACK_LO_SIGN_DIRECT
145 UNSPEC_VSUMSWS_DIRECT
158 (define_c_enum "unspecv"
166 ;; Like VI, defined in vector.md, but add ISA 2.07 integer vector ops
167 (define_mode_iterator VI2 [V4SI V8HI V16QI V2DI])
168 ;; Short (halfword and byte) integer vector modes
169 (define_mode_iterator VIshort [V8HI V16QI])
;; Float vector modes handled by AltiVec (single precision only).
171 (define_mode_iterator VF [V4SF])
172 ;; Vec modes, pity mode iterators are not composable
173 (define_mode_iterator V [V4SI V8HI V16QI V4SF])
174 ;; Vec modes for move/logical/permute ops, include vector types for move not
175 ;; otherwise handled by altivec (v2df, v2di, ti)
176 (define_mode_iterator VM [V4SI
184 (KF "FLOAT128_VECTOR_P (KFmode)")
185 (TF "FLOAT128_VECTOR_P (TFmode)")])
187 ;; Like VM, except don't do TImode
188 (define_mode_iterator VM2 [V4SI
195 (KF "FLOAT128_VECTOR_P (KFmode)")
196 (TF "FLOAT128_VECTOR_P (TFmode)")])
198 ;; Specific iterator for parity which does not have a byte/half-word form, but
199 ;; does have a quad word form
200 (define_mode_iterator VParity [V4SI
203 (TI "TARGET_VSX_TIMODE")])
;; <VI_char>: mnemonic size letter for each integer vector element
;; (b = byte, h = halfword, w = word, d = doubleword).
205 (define_mode_attr VI_char [(V2DI "d") (V4SI "w") (V8HI "h") (V16QI "b")])
;; <VI_scalar>: the scalar mode of a single element of each vector mode.
206 (define_mode_attr VI_scalar [(V2DI "DI") (V4SI "SI") (V8HI "HI") (V16QI "QI")])
;; <VI_unit>: per-mode enabling test; V2DI requires the power8 (ISA 2.07)
;; vector unit, the other modes only need plain AltiVec.
207 (define_mode_attr VI_unit [(V16QI "VECTOR_UNIT_ALTIVEC_P (V16QImode)")
208 (V8HI "VECTOR_UNIT_ALTIVEC_P (V8HImode)")
209 (V4SI "VECTOR_UNIT_ALTIVEC_P (V4SImode)")
210 (V2DI "VECTOR_UNIT_P8_VECTOR_P (V2DImode)")
211 (V1TI "VECTOR_UNIT_ALTIVEC_P (V1TImode)")])
213 ;; Vector pack/unpack
214 (define_mode_iterator VP [V2DI V4SI V8HI])
;; <VP_small>: same-width vector with twice the elements at half the size.
215 (define_mode_attr VP_small [(V2DI "V4SI") (V4SI "V8HI") (V8HI "V16QI")])
;; <VP_small_lc>: lower-case spelling of <VP_small>, for pattern names.
216 (define_mode_attr VP_small_lc [(V2DI "v4si") (V4SI "v8hi") (V8HI "v16qi")])
;; <VU_char>: size letter of the narrower element in pack/unpack mnemonics.
217 (define_mode_attr VU_char [(V2DI "w") (V4SI "h") (V8HI "b")])
;; Modes covered by the vector negate patterns (per the iterator's name).
220 (define_mode_iterator VNEG [V4SI V2DI])
222 ;; Vector move instructions.
223 (define_insn "*altivec_mov<mode>"
224 [(set (match_operand:VM2 0 "nonimmediate_operand" "=Z,v,v,?Y,?*r,?*r,v,v,?*r")
225 (match_operand:VM2 1 "input_operand" "v,Z,v,*r,Y,*r,j,W,W"))]
226 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)
227 && (register_operand (operands[0], <MODE>mode)
228 || register_operand (operands[1], <MODE>mode))"
230 switch (which_alternative)
232 case 0: return "stvx %1,%y0";
233 case 1: return "lvx %0,%y1";
234 case 2: return "vor %0,%1,%1";
238 case 6: return "vxor %0,%0,%0";
239 case 7: return output_vec_const_move (operands);
241 default: gcc_unreachable ();
244 [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*,*")
245 (set_attr "length" "4,4,4,20,20,20,4,8,32")])
247 ;; Unlike other altivec moves, allow the GPRs, since a normal use of TImode
248 ;; is for unions. However for plain data movement, slightly favor the vector
250 (define_insn "*altivec_movti"
251 [(set (match_operand:TI 0 "nonimmediate_operand" "=Z,v,v,?Y,?r,?r,v,v")
252 (match_operand:TI 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
253 "VECTOR_MEM_ALTIVEC_P (TImode)
254 && (register_operand (operands[0], TImode)
255 || register_operand (operands[1], TImode))"
257 switch (which_alternative)
259 case 0: return "stvx %1,%y0";
260 case 1: return "lvx %0,%y1";
261 case 2: return "vor %0,%1,%1";
265 case 6: return "vxor %0,%0,%0";
266 case 7: return output_vec_const_move (operands);
267 default: gcc_unreachable ();
270 [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*")])
272 ;; Load up a vector with the most significant bit set by loading up -1 and
273 ;; doing a shift left
275 [(set (match_operand:VM 0 "altivec_register_operand" "")
276 (match_operand:VM 1 "easy_vector_constant_msb" ""))]
277 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
280 rtx dest = operands[0];
281 machine_mode mode = GET_MODE (operands[0]);
285 if (mode == V4SFmode)
288 dest = gen_lowpart (V4SImode, dest);
291 num_elements = GET_MODE_NUNITS (mode);
292 v = rtvec_alloc (num_elements);
293 for (i = 0; i < num_elements; i++)
294 RTVEC_ELT (v, i) = constm1_rtx;
296 emit_insn (gen_vec_initv4si (dest, gen_rtx_PARALLEL (mode, v)));
297 emit_insn (gen_rtx_SET (dest, gen_rtx_ASHIFT (mode, dest, dest)));
302 [(set (match_operand:VM 0 "altivec_register_operand" "")
303 (match_operand:VM 1 "easy_vector_constant_add_self" ""))]
304 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
305 [(set (match_dup 0) (match_dup 3))
306 (set (match_dup 0) (match_dup 4))]
308 rtx dup = gen_easy_altivec_constant (operands[1]);
310 machine_mode op_mode = <MODE>mode;
312 /* Divide the operand of the resulting VEC_DUPLICATE, and use
313 simplify_rtx to make a CONST_VECTOR. */
314 XEXP (dup, 0) = simplify_const_binary_operation (ASHIFTRT, QImode,
315 XEXP (dup, 0), const1_rtx);
316 const_vec = simplify_rtx (dup);
318 if (op_mode == V4SFmode)
321 operands[0] = gen_lowpart (op_mode, operands[0]);
323 if (GET_MODE (const_vec) == op_mode)
324 operands[3] = const_vec;
326 operands[3] = gen_lowpart (op_mode, const_vec);
327 operands[4] = gen_rtx_PLUS (op_mode, operands[0], operands[0]);
331 [(set (match_operand:VM 0 "altivec_register_operand" "")
332 (match_operand:VM 1 "easy_vector_constant_vsldoi" ""))]
333 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && can_create_pseudo_p ()"
334 [(set (match_dup 2) (match_dup 3))
335 (set (match_dup 4) (match_dup 5))
337 (unspec:VM [(match_dup 2)
342 rtx op1 = operands[1];
343 int elt = (BYTES_BIG_ENDIAN) ? 0 : GET_MODE_NUNITS (<MODE>mode) - 1;
344 HOST_WIDE_INT val = const_vector_elt_as_int (op1, elt);
345 rtx rtx_val = GEN_INT (val);
346 int shift = vspltis_shifted (op1);
347 int nunits = GET_MODE_NUNITS (<MODE>mode);
350 gcc_assert (shift != 0);
351 operands[2] = gen_reg_rtx (<MODE>mode);
352 operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, rtvec_alloc (nunits));
353 operands[4] = gen_reg_rtx (<MODE>mode);
357 operands[5] = CONSTM1_RTX (<MODE>mode);
358 operands[6] = GEN_INT (-shift);
362 operands[5] = CONST0_RTX (<MODE>mode);
363 operands[6] = GEN_INT (shift);
366 /* Populate the constant vectors. */
367 for (i = 0; i < nunits; i++)
368 XVECEXP (operands[3], 0, i) = rtx_val;
;; Read the VRSAVE special register into a GPR.  SPR 256 is VRSAVE, so the
;; two returned templates ("mfspr %0,256" vs. "mfvrsave %0") are the same
;; operation spelled for different assemblers.
371 (define_insn "get_vrsave_internal"
372 [(set (match_operand:SI 0 "register_operand" "=r")
373 (unspec:SI [(reg:SI VRSAVE_REGNO)] UNSPEC_GET_VRSAVE))]
377 return "mfspr %0,256";
379 return "mfvrsave %0";
381 [(set_attr "type" "*")])
;; Write a GPR into the VRSAVE special register.  The unspec_volatile keeps
;; the store from being deleted or moved; "mtspr 256" and "mtvrsave" are the
;; same instruction in two assembler spellings.
383 (define_insn "*set_vrsave_internal"
384 [(match_parallel 0 "vrsave_operation"
385 [(set (reg:SI VRSAVE_REGNO)
386 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r")
387 (reg:SI VRSAVE_REGNO)] UNSPECV_SET_VRSAVE))])]
391 return "mtspr 256,%1";
393 return "mtvrsave %1";
395 [(set_attr "type" "*")])
397 (define_insn "*save_world"
398 [(match_parallel 0 "save_world_operation"
399 [(clobber (reg:SI LR_REGNO))
400 (use (match_operand:SI 1 "call_operand" "s"))])]
401 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
403 [(set_attr "type" "branch")
404 (set_attr "length" "4")])
406 (define_insn "*restore_world"
407 [(match_parallel 0 "restore_world_operation"
409 (use (reg:SI LR_REGNO))
410 (use (match_operand:SI 1 "call_operand" "s"))
411 (clobber (match_operand:SI 2 "gpc_reg_operand" "=r"))])]
412 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
415 ;; The save_vregs and restore_vregs patterns don't use memory_operand
416 ;; because (plus (reg) (const_int)) is not a valid vector address.
417 ;; This way is more compact than describing exactly what happens in
418 ;; the out-of-line functions, ie. loading the constant into r11/r12
419 ;; then using indexed addressing, and requires less editing of rtl
420 ;; to describe the operation to dwarf2out_frame_debug_expr.
421 (define_insn "*save_vregs_<mode>_r11"
422 [(match_parallel 0 "any_parallel_operand"
423 [(clobber (reg:P LR_REGNO))
424 (use (match_operand:P 1 "symbol_ref_operand" "s"))
427 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
428 (match_operand:P 3 "short_cint_operand" "I")))
429 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
432 [(set_attr "type" "branch")
433 (set_attr "length" "4")])
435 (define_insn "*save_vregs_<mode>_r12"
436 [(match_parallel 0 "any_parallel_operand"
437 [(clobber (reg:P LR_REGNO))
438 (use (match_operand:P 1 "symbol_ref_operand" "s"))
441 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
442 (match_operand:P 3 "short_cint_operand" "I")))
443 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
446 [(set_attr "type" "branch")
447 (set_attr "length" "4")])
449 (define_insn "*restore_vregs_<mode>_r11"
450 [(match_parallel 0 "any_parallel_operand"
451 [(clobber (reg:P LR_REGNO))
452 (use (match_operand:P 1 "symbol_ref_operand" "s"))
455 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
456 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
457 (match_operand:P 4 "short_cint_operand" "I"))))])]
460 [(set_attr "type" "branch")
461 (set_attr "length" "4")])
463 (define_insn "*restore_vregs_<mode>_r12"
464 [(match_parallel 0 "any_parallel_operand"
465 [(clobber (reg:P LR_REGNO))
466 (use (match_operand:P 1 "symbol_ref_operand" "s"))
469 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
470 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
471 (match_operand:P 4 "short_cint_operand" "I"))))])]
474 [(set_attr "type" "branch")
475 (set_attr "length" "4")])
477 ;; Simple binary operations.
;; Vector integer add, modulo arithmetic (vaddu[bhwd]m).
480 (define_insn "add<mode>3"
481 [(set (match_operand:VI2 0 "register_operand" "=v")
482 (plus:VI2 (match_operand:VI2 1 "register_operand" "v")
483 (match_operand:VI2 2 "register_operand" "v")))]
485 "vaddu<VI_char>m %0,%1,%2"
486 [(set_attr "type" "vecsimple")])
488 (define_insn "*altivec_addv4sf3"
489 [(set (match_operand:V4SF 0 "register_operand" "=v")
490 (plus:V4SF (match_operand:V4SF 1 "register_operand" "v")
491 (match_operand:V4SF 2 "register_operand" "v")))]
492 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
494 [(set_attr "type" "vecfloat")])
;; altivec_vaddcuw, kept opaque as an unspec.  Per the mnemonic this is the
;; carry-out of unsigned word addition -- confirm against the full pattern.
496 (define_insn "altivec_vaddcuw"
497 [(set (match_operand:V4SI 0 "register_operand" "=v")
498 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
499 (match_operand:V4SI 2 "register_operand" "v")]
501 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
503 [(set_attr "type" "vecsimple")])
;; Saturating unsigned vector add.  The parallel set of VSCR_REGNO models
;; the instruction's update of the VSCR (saturation) register.
505 (define_insn "altivec_vaddu<VI_char>s"
506 [(set (match_operand:VI 0 "register_operand" "=v")
507 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
508 (match_operand:VI 2 "register_operand" "v")]
510 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
512 "vaddu<VI_char>s %0,%1,%2"
513 [(set_attr "type" "vecsimple")])
;; Saturating signed vector add; also models the VSCR update.
515 (define_insn "altivec_vadds<VI_char>s"
516 [(set (match_operand:VI 0 "register_operand" "=v")
517 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
518 (match_operand:VI 2 "register_operand" "v")]
520 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
521 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
522 "vadds<VI_char>s %0,%1,%2"
523 [(set_attr "type" "vecsimple")])
;; Vector integer subtract, modulo arithmetic (vsubu[bhwd]m).
526 (define_insn "sub<mode>3"
527 [(set (match_operand:VI2 0 "register_operand" "=v")
528 (minus:VI2 (match_operand:VI2 1 "register_operand" "v")
529 (match_operand:VI2 2 "register_operand" "v")))]
531 "vsubu<VI_char>m %0,%1,%2"
532 [(set_attr "type" "vecsimple")])
534 (define_insn "*altivec_subv4sf3"
535 [(set (match_operand:V4SF 0 "register_operand" "=v")
536 (minus:V4SF (match_operand:V4SF 1 "register_operand" "v")
537 (match_operand:V4SF 2 "register_operand" "v")))]
538 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
540 [(set_attr "type" "vecfloat")])
;; altivec_vsubcuw, kept opaque as an unspec.  Per the mnemonic this is the
;; carry (borrow indicator) of unsigned word subtraction -- confirm against
;; the full pattern.
542 (define_insn "altivec_vsubcuw"
543 [(set (match_operand:V4SI 0 "register_operand" "=v")
544 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
545 (match_operand:V4SI 2 "register_operand" "v")]
547 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
549 [(set_attr "type" "vecsimple")])
;; Saturating unsigned vector subtract; also models the VSCR update.
551 (define_insn "altivec_vsubu<VI_char>s"
552 [(set (match_operand:VI 0 "register_operand" "=v")
553 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
554 (match_operand:VI 2 "register_operand" "v")]
556 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
557 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
558 "vsubu<VI_char>s %0,%1,%2"
559 [(set_attr "type" "vecsimple")])
;; Saturating signed vector subtract; also models the VSCR update.
561 (define_insn "altivec_vsubs<VI_char>s"
562 [(set (match_operand:VI 0 "register_operand" "=v")
563 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
564 (match_operand:VI 2 "register_operand" "v")]
566 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
567 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
568 "vsubs<VI_char>s %0,%1,%2"
569 [(set_attr "type" "vecsimple")])
;; Unsigned vector average (vavgu[bhw]), kept opaque as an unspec.
572 (define_insn "altivec_vavgu<VI_char>"
573 [(set (match_operand:VI 0 "register_operand" "=v")
574 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
575 (match_operand:VI 2 "register_operand" "v")]
578 "vavgu<VI_char> %0,%1,%2"
579 [(set_attr "type" "vecsimple")])
;; Signed vector average (vavgs[bhw]), kept opaque as an unspec.
581 (define_insn "altivec_vavgs<VI_char>"
582 [(set (match_operand:VI 0 "register_operand" "=v")
583 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
584 (match_operand:VI 2 "register_operand" "v")]
586 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
587 "vavgs<VI_char> %0,%1,%2"
588 [(set_attr "type" "vecsimple")])
590 (define_insn "altivec_vcmpbfp"
591 [(set (match_operand:V4SI 0 "register_operand" "=v")
592 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
593 (match_operand:V4SF 2 "register_operand" "v")]
595 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
597 [(set_attr "type" "veccmp")])
;; Vector integer compare equal (vcmpequ[bhwd]).
599 (define_insn "*altivec_eq<mode>"
600 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
601 (eq:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
602 (match_operand:VI2 2 "altivec_register_operand" "v")))]
604 "vcmpequ<VI_char> %0,%1,%2"
605 [(set_attr "type" "veccmpfx")])
;; Vector signed compare greater-than (vcmpgts[bhwd]).
607 (define_insn "*altivec_gt<mode>"
608 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
609 (gt:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
610 (match_operand:VI2 2 "altivec_register_operand" "v")))]
612 "vcmpgts<VI_char> %0,%1,%2"
613 [(set_attr "type" "veccmpfx")])
;; Vector unsigned compare greater-than (vcmpgtu[bhwd]).
615 (define_insn "*altivec_gtu<mode>"
616 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
617 (gtu:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
618 (match_operand:VI2 2 "altivec_register_operand" "v")))]
620 "vcmpgtu<VI_char> %0,%1,%2"
621 [(set_attr "type" "veccmpfx")])
623 (define_insn "*altivec_eqv4sf"
624 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
625 (eq:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
626 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
627 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
629 [(set_attr "type" "veccmp")])
631 (define_insn "*altivec_gtv4sf"
632 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
633 (gt:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
634 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
635 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
637 [(set_attr "type" "veccmp")])
639 (define_insn "*altivec_gev4sf"
640 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
641 (ge:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
642 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
643 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
645 [(set_attr "type" "veccmp")])
647 (define_insn "*altivec_vsel<mode>"
648 [(set (match_operand:VM 0 "altivec_register_operand" "=v")
650 (ne:CC (match_operand:VM 1 "altivec_register_operand" "v")
651 (match_operand:VM 4 "zero_constant" ""))
652 (match_operand:VM 2 "altivec_register_operand" "v")
653 (match_operand:VM 3 "altivec_register_operand" "v")))]
654 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
656 [(set_attr "type" "vecmove")])
658 (define_insn "*altivec_vsel<mode>_uns"
659 [(set (match_operand:VM 0 "altivec_register_operand" "=v")
661 (ne:CCUNS (match_operand:VM 1 "altivec_register_operand" "v")
662 (match_operand:VM 4 "zero_constant" ""))
663 (match_operand:VM 2 "altivec_register_operand" "v")
664 (match_operand:VM 3 "altivec_register_operand" "v")))]
665 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
667 [(set_attr "type" "vecmove")])
669 ;; Fused multiply add.
;; Single-precision fused multiply-add: op0 = op1 * op2 + op3 (vmaddfp).
671 (define_insn "*altivec_fmav4sf4"
672 [(set (match_operand:V4SF 0 "register_operand" "=v")
673 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
674 (match_operand:V4SF 2 "register_operand" "v")
675 (match_operand:V4SF 3 "register_operand" "v")))]
676 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
677 "vmaddfp %0,%1,%2,%3"
678 [(set_attr "type" "vecfloat")])
680 ;; We do multiply as a fused multiply-add with an add of a -0.0 vector.
682 (define_expand "altivec_mulv4sf3"
683 [(set (match_operand:V4SF 0 "register_operand" "")
684 (fma:V4SF (match_operand:V4SF 1 "register_operand" "")
685 (match_operand:V4SF 2 "register_operand" "")
687 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
691 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
692 neg0 = gen_reg_rtx (V4SImode);
693 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
694 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
696 operands[3] = gen_lowpart (V4SFmode, neg0);
699 ;; 32-bit integer multiplication
700 ;; A_high = Operand_0 & 0xFFFF0000 >> 16
701 ;; A_low = Operand_0 & 0xFFFF
702 ;; B_high = Operand_1 & 0xFFFF0000 >> 16
703 ;; B_low = Operand_1 & 0xFFFF
704 ;; result = A_low * B_low + (A_high * B_low + B_high * A_low) << 16
706 ;; (define_insn "mulv4si3"
707 ;; [(set (match_operand:V4SI 0 "register_operand" "=v")
708 ;; (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
709 ;; (match_operand:V4SI 2 "register_operand" "v")))]
;; Single-instruction V4SI multiply, the power8-only variant used by the
;; mulv4si3 expander below when TARGET_P8_VECTOR holds.
710 (define_insn "mulv4si3_p8"
711 [(set (match_operand:V4SI 0 "register_operand" "=v")
712 (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
713 (match_operand:V4SI 2 "register_operand" "v")))]
716 [(set_attr "type" "veccomplex")])
718 (define_expand "mulv4si3"
719 [(use (match_operand:V4SI 0 "register_operand" ""))
720 (use (match_operand:V4SI 1 "register_operand" ""))
721 (use (match_operand:V4SI 2 "register_operand" ""))]
733 if (TARGET_P8_VECTOR)
735 emit_insn (gen_mulv4si3_p8 (operands[0], operands[1], operands[2]));
739 zero = gen_reg_rtx (V4SImode);
740 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
742 sixteen = gen_reg_rtx (V4SImode);
743 emit_insn (gen_altivec_vspltisw (sixteen, gen_rtx_CONST_INT (V4SImode, -16)));
745 swap = gen_reg_rtx (V4SImode);
746 emit_insn (gen_vrotlv4si3 (swap, operands[2], sixteen));
748 one = gen_reg_rtx (V8HImode);
749 convert_move (one, operands[1], 0);
751 two = gen_reg_rtx (V8HImode);
752 convert_move (two, operands[2], 0);
754 small_swap = gen_reg_rtx (V8HImode);
755 convert_move (small_swap, swap, 0);
757 low_product = gen_reg_rtx (V4SImode);
758 emit_insn (gen_altivec_vmulouh (low_product, one, two));
760 high_product = gen_reg_rtx (V4SImode);
761 emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero));
763 emit_insn (gen_vashlv4si3 (high_product, high_product, sixteen));
765 emit_insn (gen_addv4si3 (operands[0], high_product, low_product));
;; V8HI multiply: expand as vmladduhm (multiply-low-and-add) with a
;; splatted-zero third operand, giving the plain modulo product.
770 (define_expand "mulv8hi3"
771 [(use (match_operand:V8HI 0 "register_operand" ""))
772 (use (match_operand:V8HI 1 "register_operand" ""))
773 (use (match_operand:V8HI 2 "register_operand" ""))]
776 rtx zero = gen_reg_rtx (V8HImode);
778 emit_insn (gen_altivec_vspltish (zero, const0_rtx));
779 emit_insn (gen_altivec_vmladduhm(operands[0], operands[1], operands[2], zero));
784 ;; Fused multiply subtract
785 (define_insn "*altivec_vnmsubfp"
786 [(set (match_operand:V4SF 0 "register_operand" "=v")
788 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
789 (match_operand:V4SF 2 "register_operand" "v")
791 (match_operand:V4SF 3 "register_operand" "v")))))]
792 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
793 "vnmsubfp %0,%1,%2,%3"
794 [(set_attr "type" "vecfloat")])
;; Multiply-sum family (vmsum*): element products of operands 1 and 2 are
;; summed into the word accumulator operand 3; kept opaque as unspecs.
;; Unsigned modulo variant (vmsumubm/vmsumuhm via <VI_char>).
796 (define_insn "altivec_vmsumu<VI_char>m"
797 [(set (match_operand:V4SI 0 "register_operand" "=v")
798 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
799 (match_operand:VIshort 2 "register_operand" "v")
800 (match_operand:V4SI 3 "register_operand" "v")]
803 "vmsumu<VI_char>m %0,%1,%2,%3"
804 [(set_attr "type" "veccomplex")])
;; Mixed-sign modulo variant (vmsummbm/vmsummhm).
806 (define_insn "altivec_vmsumm<VI_char>m"
807 [(set (match_operand:V4SI 0 "register_operand" "=v")
808 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
809 (match_operand:VIshort 2 "register_operand" "v")
810 (match_operand:V4SI 3 "register_operand" "v")]
813 "vmsumm<VI_char>m %0,%1,%2,%3"
814 [(set_attr "type" "veccomplex")])
;; Signed halfword modulo variant.
816 (define_insn "altivec_vmsumshm"
817 [(set (match_operand:V4SI 0 "register_operand" "=v")
818 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
819 (match_operand:V8HI 2 "register_operand" "v")
820 (match_operand:V4SI 3 "register_operand" "v")]
823 "vmsumshm %0,%1,%2,%3"
824 [(set_attr "type" "veccomplex")])
;; Unsigned halfword saturating variant; models the VSCR update.
826 (define_insn "altivec_vmsumuhs"
827 [(set (match_operand:V4SI 0 "register_operand" "=v")
828 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
829 (match_operand:V8HI 2 "register_operand" "v")
830 (match_operand:V4SI 3 "register_operand" "v")]
832 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
834 "vmsumuhs %0,%1,%2,%3"
835 [(set_attr "type" "veccomplex")])
;; Signed halfword saturating variant; models the VSCR update.
837 (define_insn "altivec_vmsumshs"
838 [(set (match_operand:V4SI 0 "register_operand" "=v")
839 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
840 (match_operand:V8HI 2 "register_operand" "v")
841 (match_operand:V4SI 3 "register_operand" "v")]
843 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
845 "vmsumshs %0,%1,%2,%3"
846 [(set_attr "type" "veccomplex")])
;; Element-wise vector min/max, expressed with the generic RTL codes so the
;; middle end can recognize them.
;; Unsigned max (vmaxu[bhwd]).
850 (define_insn "umax<mode>3"
851 [(set (match_operand:VI2 0 "register_operand" "=v")
852 (umax:VI2 (match_operand:VI2 1 "register_operand" "v")
853 (match_operand:VI2 2 "register_operand" "v")))]
855 "vmaxu<VI_char> %0,%1,%2"
856 [(set_attr "type" "vecsimple")])
;; Signed max (vmaxs[bhwd]).
858 (define_insn "smax<mode>3"
859 [(set (match_operand:VI2 0 "register_operand" "=v")
860 (smax:VI2 (match_operand:VI2 1 "register_operand" "v")
861 (match_operand:VI2 2 "register_operand" "v")))]
863 "vmaxs<VI_char> %0,%1,%2"
864 [(set_attr "type" "vecsimple")])
;; Single-precision float max.
866 (define_insn "*altivec_smaxv4sf3"
867 [(set (match_operand:V4SF 0 "register_operand" "=v")
868 (smax:V4SF (match_operand:V4SF 1 "register_operand" "v")
869 (match_operand:V4SF 2 "register_operand" "v")))]
870 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
872 [(set_attr "type" "veccmp")])
;; Unsigned min (vminu[bhwd]).
874 (define_insn "umin<mode>3"
875 [(set (match_operand:VI2 0 "register_operand" "=v")
876 (umin:VI2 (match_operand:VI2 1 "register_operand" "v")
877 (match_operand:VI2 2 "register_operand" "v")))]
879 "vminu<VI_char> %0,%1,%2"
880 [(set_attr "type" "vecsimple")])
;; Signed min (vmins[bhwd]).
882 (define_insn "smin<mode>3"
883 [(set (match_operand:VI2 0 "register_operand" "=v")
884 (smin:VI2 (match_operand:VI2 1 "register_operand" "v")
885 (match_operand:VI2 2 "register_operand" "v")))]
887 "vmins<VI_char> %0,%1,%2"
888 [(set_attr "type" "vecsimple")])
;; Single-precision float min.
890 (define_insn "*altivec_sminv4sf3"
891 [(set (match_operand:V4SF 0 "register_operand" "=v")
892 (smin:V4SF (match_operand:V4SF 1 "register_operand" "v")
893 (match_operand:V4SF 2 "register_operand" "v")))]
894 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
896 [(set_attr "type" "veccmp")])
;; Saturating multiply-high-and-add, signed halfwords (vmhaddshs); the
;; parallel VSCR set models the saturation-flag update.
898 (define_insn "altivec_vmhaddshs"
899 [(set (match_operand:V8HI 0 "register_operand" "=v")
900 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
901 (match_operand:V8HI 2 "register_operand" "v")
902 (match_operand:V8HI 3 "register_operand" "v")]
904 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
906 "vmhaddshs %0,%1,%2,%3"
907 [(set_attr "type" "veccomplex")])
;; Rounding variant of the above (vmhraddshs); also models VSCR.
909 (define_insn "altivec_vmhraddshs"
910 [(set (match_operand:V8HI 0 "register_operand" "=v")
911 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
912 (match_operand:V8HI 2 "register_operand" "v")
913 (match_operand:V8HI 3 "register_operand" "v")]
915 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
917 "vmhraddshs %0,%1,%2,%3"
918 [(set_attr "type" "veccomplex")])
;; Multiply-low-and-add, modulo 2^16 per halfword: op0 = op1*op2 + op3
;; (vmladduhm).  Expressed with plus/mult so the optimizers see through it.
920 (define_insn "altivec_vmladduhm"
921 [(set (match_operand:V8HI 0 "register_operand" "=v")
922 (plus:V8HI (mult:V8HI (match_operand:V8HI 1 "register_operand" "v")
923 (match_operand:V8HI 2 "register_operand" "v"))
924 (match_operand:V8HI 3 "register_operand" "v")))]
926 "vmladduhm %0,%1,%2,%3"
927 [(set_attr "type" "veccomplex")])
929 (define_expand "altivec_vmrghb"
930 [(use (match_operand:V16QI 0 "register_operand" ""))
931 (use (match_operand:V16QI 1 "register_operand" ""))
932 (use (match_operand:V16QI 2 "register_operand" ""))]
938 /* Special handling for LE with -maltivec=be. */
939 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
941 v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
942 GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
943 GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
944 GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
945 x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
949 v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
950 GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
951 GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
952 GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
953 x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
956 x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
957 emit_insn (gen_rtx_SET (operands[0], x));
961 (define_insn "*altivec_vmrghb_internal"
962 [(set (match_operand:V16QI 0 "register_operand" "=v")
965 (match_operand:V16QI 1 "register_operand" "v")
966 (match_operand:V16QI 2 "register_operand" "v"))
967 (parallel [(const_int 0) (const_int 16)
968 (const_int 1) (const_int 17)
969 (const_int 2) (const_int 18)
970 (const_int 3) (const_int 19)
971 (const_int 4) (const_int 20)
972 (const_int 5) (const_int 21)
973 (const_int 6) (const_int 22)
974 (const_int 7) (const_int 23)])))]
977 if (BYTES_BIG_ENDIAN)
978 return "vmrghb %0,%1,%2";
980 return "vmrglb %0,%2,%1";
982 [(set_attr "type" "vecperm")])
;; Merge-high bytes as an opaque unspec.  NOTE(review): the _direct forms
;; appear to emit the raw instruction without the endian adjustment done by
;; the altivec_vmrghb expander -- confirm against the full file.
984 (define_insn "altivec_vmrghb_direct"
985 [(set (match_operand:V16QI 0 "register_operand" "=v")
986 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
987 (match_operand:V16QI 2 "register_operand" "v")]
988 UNSPEC_VMRGH_DIRECT))]
991 [(set_attr "type" "vecperm")])
993 (define_expand "altivec_vmrghh"
994 [(use (match_operand:V8HI 0 "register_operand" ""))
995 (use (match_operand:V8HI 1 "register_operand" ""))
996 (use (match_operand:V8HI 2 "register_operand" ""))]
1002 /* Special handling for LE with -maltivec=be. */
1003 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1005 v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
1006 GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
1007 x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
1011 v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
1012 GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
1013 x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1016 x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1017 emit_insn (gen_rtx_SET (operands[0], x));
1021 (define_insn "*altivec_vmrghh_internal"
1022 [(set (match_operand:V8HI 0 "register_operand" "=v")
1025 (match_operand:V8HI 1 "register_operand" "v")
1026 (match_operand:V8HI 2 "register_operand" "v"))
1027 (parallel [(const_int 0) (const_int 8)
1028 (const_int 1) (const_int 9)
1029 (const_int 2) (const_int 10)
1030 (const_int 3) (const_int 11)])))]
1033 if (BYTES_BIG_ENDIAN)
1034 return "vmrghh %0,%1,%2";
1036 return "vmrglh %0,%2,%1";
1038 [(set_attr "type" "vecperm")])
;; Merge-high halfwords as an opaque unspec ("direct", i.e. no endian
;; adjustment -- see the corresponding altivec_vmrghh expander).
1040 (define_insn "altivec_vmrghh_direct"
1041 [(set (match_operand:V8HI 0 "register_operand" "=v")
1042 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1043 (match_operand:V8HI 2 "register_operand" "v")]
1044 UNSPEC_VMRGH_DIRECT))]
1047 [(set_attr "type" "vecperm")])
1049 (define_expand "altivec_vmrghw"
1050 [(use (match_operand:V4SI 0 "register_operand" ""))
1051 (use (match_operand:V4SI 1 "register_operand" ""))
1052 (use (match_operand:V4SI 2 "register_operand" ""))]
1053 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1058 /* Special handling for LE with -maltivec=be. */
1059 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1061 v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1062 x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
1066 v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1067 x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1070 x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1071 emit_insn (gen_rtx_SET (operands[0], x));
1075 (define_insn "*altivec_vmrghw_internal"
1076 [(set (match_operand:V4SI 0 "register_operand" "=v")
1079 (match_operand:V4SI 1 "register_operand" "v")
1080 (match_operand:V4SI 2 "register_operand" "v"))
1081 (parallel [(const_int 0) (const_int 4)
1082 (const_int 1) (const_int 5)])))]
1083 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1085 if (BYTES_BIG_ENDIAN)
1086 return "vmrghw %0,%1,%2";
1088 return "vmrglw %0,%2,%1";
1090 [(set_attr "type" "vecperm")])
;; Merge-high words as an opaque unspec ("direct", i.e. no endian
;; adjustment -- see the corresponding altivec_vmrghw expander).
1092 (define_insn "altivec_vmrghw_direct"
1093 [(set (match_operand:V4SI 0 "register_operand" "=v")
1094 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1095 (match_operand:V4SI 2 "register_operand" "v")]
1096 UNSPEC_VMRGH_DIRECT))]
1099 [(set_attr "type" "vecperm")])
1101 (define_insn "*altivec_vmrghsf"
1102 [(set (match_operand:V4SF 0 "register_operand" "=v")
1105 (match_operand:V4SF 1 "register_operand" "v")
1106 (match_operand:V4SF 2 "register_operand" "v"))
1107 (parallel [(const_int 0) (const_int 4)
1108 (const_int 1) (const_int 5)])))]
1109 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1111 if (BYTES_BIG_ENDIAN)
1112 return "vmrghw %0,%1,%2";
1114 return "vmrglw %0,%2,%1";
1116 [(set_attr "type" "vecperm")])
;; vmrglb: merge the low halves of two V16QI vectors byte-by-byte.
;; The expander builds explicit vec_select RTL.  For LE with
;; -maltivec=be it uses the high-half indices and swaps the operands
;; of the VEC_CONCAT so the RTL reflects big-endian element order.
1118 (define_expand "altivec_vmrglb"
1119 [(use (match_operand:V16QI 0 "register_operand" ""))
1120 (use (match_operand:V16QI 1 "register_operand" ""))
1121 (use (match_operand:V16QI 2 "register_operand" ""))]
1127 /* Special handling for LE with -maltivec=be. */
1128 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1130 v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
1131 GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
1132 GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
1133 GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
1134 x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
;; Natural order: interleave elements 8..15 of op1 with 24..31 (i.e.
;; elements 8..15 of op2) from the 32-byte concatenation.
1138 v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
1139 GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
1140 GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
1141 GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
1142 x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
1145 x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1146 emit_insn (gen_rtx_SET (operands[0], x));
;; Matcher for the RTL generated by the expander above: vmrglb on BE,
;; vmrghb with swapped operands on LE.
1150 (define_insn "*altivec_vmrglb_internal"
1151 [(set (match_operand:V16QI 0 "register_operand" "=v")
1154 (match_operand:V16QI 1 "register_operand" "v")
1155 (match_operand:V16QI 2 "register_operand" "v"))
1156 (parallel [(const_int 8) (const_int 24)
1157 (const_int 9) (const_int 25)
1158 (const_int 10) (const_int 26)
1159 (const_int 11) (const_int 27)
1160 (const_int 12) (const_int 28)
1161 (const_int 13) (const_int 29)
1162 (const_int 14) (const_int 30)
1163 (const_int 15) (const_int 31)])))]
1166 if (BYTES_BIG_ENDIAN)
1167 return "vmrglb %0,%1,%2";
1169 return "vmrghb %0,%2,%1";
1171 [(set_attr "type" "vecperm")])
;; UNSPEC form with no endian adjustment in the RTL; the output
;; template is not visible in this extract — presumably plain vmrglb.
1173 (define_insn "altivec_vmrglb_direct"
1174 [(set (match_operand:V16QI 0 "register_operand" "=v")
1175 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1176 (match_operand:V16QI 2 "register_operand" "v")]
1177 UNSPEC_VMRGL_DIRECT))]
1180 [(set_attr "type" "vecperm")])
;; vmrglh: merge the low halfwords of two V8HI vectors.  Same
;; structure as the vmrglb expander: LE with -maltivec=be uses the
;; high-half indices and a swapped VEC_CONCAT.
1182 (define_expand "altivec_vmrglh"
1183 [(use (match_operand:V8HI 0 "register_operand" ""))
1184 (use (match_operand:V8HI 1 "register_operand" ""))
1185 (use (match_operand:V8HI 2 "register_operand" ""))]
1191 /* Special handling for LE with -maltivec=be. */
1192 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1194 v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
1195 GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
1196 x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
;; Natural order: interleave halfwords 4..7 of op1 with 12..15.
1200 v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
1201 GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
1202 x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1205 x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1206 emit_insn (gen_rtx_SET (operands[0], x));
;; Matcher: vmrglh on BE, vmrghh with swapped operands on LE.
1210 (define_insn "*altivec_vmrglh_internal"
1211 [(set (match_operand:V8HI 0 "register_operand" "=v")
1214 (match_operand:V8HI 1 "register_operand" "v")
1215 (match_operand:V8HI 2 "register_operand" "v"))
1216 (parallel [(const_int 4) (const_int 12)
1217 (const_int 5) (const_int 13)
1218 (const_int 6) (const_int 14)
1219 (const_int 7) (const_int 15)])))]
1222 if (BYTES_BIG_ENDIAN)
1223 return "vmrglh %0,%1,%2";
1225 return "vmrghh %0,%2,%1";
1227 [(set_attr "type" "vecperm")])
;; UNSPEC form with no endian adjustment (template line not visible
;; in this extract).
1229 (define_insn "altivec_vmrglh_direct"
1230 [(set (match_operand:V8HI 0 "register_operand" "=v")
1231 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1232 (match_operand:V8HI 2 "register_operand" "v")]
1233 UNSPEC_VMRGL_DIRECT))]
1236 [(set_attr "type" "vecperm")])
;; vmrglw: merge the low words of two V4SI vectors.  LE with
;; -maltivec=be selects the high-half indices from a swapped concat,
;; mirroring the vmrglb/vmrglh expanders above.
1238 (define_expand "altivec_vmrglw"
1239 [(use (match_operand:V4SI 0 "register_operand" ""))
1240 (use (match_operand:V4SI 1 "register_operand" ""))
1241 (use (match_operand:V4SI 2 "register_operand" ""))]
1242 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1247 /* Special handling for LE with -maltivec=be. */
1248 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1250 v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1251 x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
;; Natural order: interleave words 2,3 of op1 with words 2,3 of op2.
1255 v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1256 x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1259 x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1260 emit_insn (gen_rtx_SET (operands[0], x));
;; Matcher: vmrglw on BE, vmrghw with swapped operands on LE.
1264 (define_insn "*altivec_vmrglw_internal"
1265 [(set (match_operand:V4SI 0 "register_operand" "=v")
1268 (match_operand:V4SI 1 "register_operand" "v")
1269 (match_operand:V4SI 2 "register_operand" "v"))
1270 (parallel [(const_int 2) (const_int 6)
1271 (const_int 3) (const_int 7)])))]
1272 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1274 if (BYTES_BIG_ENDIAN)
1275 return "vmrglw %0,%1,%2";
1277 return "vmrghw %0,%2,%1";
1279 [(set_attr "type" "vecperm")])
;; UNSPEC form with no endian adjustment (template line not visible
;; in this extract).
1281 (define_insn "altivec_vmrglw_direct"
1282 [(set (match_operand:V4SI 0 "register_operand" "=v")
1283 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1284 (match_operand:V4SI 2 "register_operand" "v")]
1285 UNSPEC_VMRGL_DIRECT))]
1288 [(set_attr "type" "vecperm")])
;; Float variant of the word merge-low; same word indices, reuses the
;; integer vmrglw/vmrghw instructions.
1290 (define_insn "*altivec_vmrglsf"
1291 [(set (match_operand:V4SF 0 "register_operand" "=v")
1294 (match_operand:V4SF 1 "register_operand" "v")
1295 (match_operand:V4SF 2 "register_operand" "v"))
1296 (parallel [(const_int 2) (const_int 6)
1297 (const_int 3) (const_int 7)])))]
1298 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1300 if (BYTES_BIG_ENDIAN)
1301 return "vmrglw %0,%1,%2";
1303 return "vmrghw %0,%2,%1";
1305 [(set_attr "type" "vecperm")])
1307 ;; Power8 vector merge even/odd
;; vmrgew: select the even-numbered words {0,4,2,6} of the
;; concatenation.  On LE the even/odd roles swap, so vmrgow with
;; swapped operands is emitted instead.
1308 (define_insn "p8_vmrgew"
1309 [(set (match_operand:V4SI 0 "register_operand" "=v")
1312 (match_operand:V4SI 1 "register_operand" "v")
1313 (match_operand:V4SI 2 "register_operand" "v"))
1314 (parallel [(const_int 0) (const_int 4)
1315 (const_int 2) (const_int 6)])))]
1318 if (BYTES_BIG_ENDIAN)
1319 return "vmrgew %0,%1,%2";
1321 return "vmrgow %0,%2,%1";
1323 [(set_attr "type" "vecperm")])
;; vmrgow: select the odd-numbered words {1,5,3,7}; dual of p8_vmrgew.
1325 (define_insn "p8_vmrgow"
1326 [(set (match_operand:V4SI 0 "register_operand" "=v")
1329 (match_operand:V4SI 1 "register_operand" "v")
1330 (match_operand:V4SI 2 "register_operand" "v"))
1331 (parallel [(const_int 1) (const_int 5)
1332 (const_int 3) (const_int 7)])))]
1335 if (BYTES_BIG_ENDIAN)
1336 return "vmrgow %0,%1,%2";
1338 return "vmrgew %0,%2,%1";
1340 [(set_attr "type" "vecperm")])
;; UNSPEC form for V4SF with no endian adjustment in the RTL
;; (condition and template lines not visible in this extract).
1342 (define_insn "p8_vmrgew_v4sf_direct"
1343 [(set (match_operand:V4SF 0 "register_operand" "=v")
1344 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")
1345 (match_operand:V4SF 2 "register_operand" "v")]
1346 UNSPEC_VMRGEW_DIRECT))]
1349 [(set_attr "type" "vecperm")])
;; Widening multiply of even/odd elements.  "Even"/"odd" refer to the
;; element-order view (VECTOR_ELT_ORDER_BIG): in big-endian element
;; order the vmule*/vmulo* instruction is used directly, while in true
;; little-endian order the element numbering reverses, so each "even"
;; expander maps to the odd-multiply instruction and vice versa.
1351 (define_expand "vec_widen_umult_even_v16qi"
1352 [(use (match_operand:V8HI 0 "register_operand" ""))
1353 (use (match_operand:V16QI 1 "register_operand" ""))
1354 (use (match_operand:V16QI 2 "register_operand" ""))]
1357 if (VECTOR_ELT_ORDER_BIG)
1358 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1360 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1364 (define_expand "vec_widen_smult_even_v16qi"
1365 [(use (match_operand:V8HI 0 "register_operand" ""))
1366 (use (match_operand:V16QI 1 "register_operand" ""))
1367 (use (match_operand:V16QI 2 "register_operand" ""))]
1370 if (VECTOR_ELT_ORDER_BIG)
1371 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1373 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1377 (define_expand "vec_widen_umult_even_v8hi"
1378 [(use (match_operand:V4SI 0 "register_operand" ""))
1379 (use (match_operand:V8HI 1 "register_operand" ""))
1380 (use (match_operand:V8HI 2 "register_operand" ""))]
1383 if (VECTOR_ELT_ORDER_BIG)
1384 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1386 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1390 (define_expand "vec_widen_smult_even_v8hi"
1391 [(use (match_operand:V4SI 0 "register_operand" ""))
1392 (use (match_operand:V8HI 1 "register_operand" ""))
1393 (use (match_operand:V8HI 2 "register_operand" ""))]
1396 if (VECTOR_ELT_ORDER_BIG)
1397 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1399 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
;; Odd-element expanders: the instruction choice is the mirror image
;; of the even-element expanders above.
1403 (define_expand "vec_widen_umult_odd_v16qi"
1404 [(use (match_operand:V8HI 0 "register_operand" ""))
1405 (use (match_operand:V16QI 1 "register_operand" ""))
1406 (use (match_operand:V16QI 2 "register_operand" ""))]
1409 if (VECTOR_ELT_ORDER_BIG)
1410 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1412 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1416 (define_expand "vec_widen_smult_odd_v16qi"
1417 [(use (match_operand:V8HI 0 "register_operand" ""))
1418 (use (match_operand:V16QI 1 "register_operand" ""))
1419 (use (match_operand:V16QI 2 "register_operand" ""))]
1422 if (VECTOR_ELT_ORDER_BIG)
1423 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1425 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1429 (define_expand "vec_widen_umult_odd_v8hi"
1430 [(use (match_operand:V4SI 0 "register_operand" ""))
1431 (use (match_operand:V8HI 1 "register_operand" ""))
1432 (use (match_operand:V8HI 2 "register_operand" ""))]
1435 if (VECTOR_ELT_ORDER_BIG)
1436 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1438 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1442 (define_expand "vec_widen_smult_odd_v8hi"
1443 [(use (match_operand:V4SI 0 "register_operand" ""))
1444 (use (match_operand:V8HI 1 "register_operand" ""))
1445 (use (match_operand:V8HI 2 "register_operand" ""))]
1448 if (VECTOR_ELT_ORDER_BIG)
1449 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1451 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
;; Underlying widening-multiply insns: byte inputs produce halfword
;; results (V16QI x V16QI -> V8HI), halfword inputs produce word
;; results (V8HI x V8HI -> V4SI).  Signed/unsigned and even/odd
;; variants are distinguished by unspec codes whose lines are not
;; visible in this extract — confirm against upstream altivec.md.
1455 (define_insn "altivec_vmuleub"
1456 [(set (match_operand:V8HI 0 "register_operand" "=v")
1457 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1458 (match_operand:V16QI 2 "register_operand" "v")]
1462 [(set_attr "type" "veccomplex")])
1464 (define_insn "altivec_vmuloub"
1465 [(set (match_operand:V8HI 0 "register_operand" "=v")
1466 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1467 (match_operand:V16QI 2 "register_operand" "v")]
1471 [(set_attr "type" "veccomplex")])
1473 (define_insn "altivec_vmulesb"
1474 [(set (match_operand:V8HI 0 "register_operand" "=v")
1475 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1476 (match_operand:V16QI 2 "register_operand" "v")]
1480 [(set_attr "type" "veccomplex")])
1482 (define_insn "altivec_vmulosb"
1483 [(set (match_operand:V8HI 0 "register_operand" "=v")
1484 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1485 (match_operand:V16QI 2 "register_operand" "v")]
1489 [(set_attr "type" "veccomplex")])
1491 (define_insn "altivec_vmuleuh"
1492 [(set (match_operand:V4SI 0 "register_operand" "=v")
1493 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1494 (match_operand:V8HI 2 "register_operand" "v")]
1498 [(set_attr "type" "veccomplex")])
1500 (define_insn "altivec_vmulouh"
1501 [(set (match_operand:V4SI 0 "register_operand" "=v")
1502 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1503 (match_operand:V8HI 2 "register_operand" "v")]
1507 [(set_attr "type" "veccomplex")])
1509 (define_insn "altivec_vmulesh"
1510 [(set (match_operand:V4SI 0 "register_operand" "=v")
1511 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1512 (match_operand:V8HI 2 "register_operand" "v")]
1516 [(set_attr "type" "veccomplex")])
1518 (define_insn "altivec_vmulosh"
1519 [(set (match_operand:V4SI 0 "register_operand" "=v")
1520 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1521 (match_operand:V8HI 2 "register_operand" "v")]
1525 [(set_attr "type" "veccomplex")])
1528 ;; Vector pack/unpack
;; Pack patterns narrow two wide vectors into one (VP -> <VP_small>).
;; The element-order macro VECTOR_ELT_ORDER_BIG decides operand order:
;; in big-endian element order op1's elements come first, so %1,%2; in
;; true LE order the operands are swapped in the emitted instruction.
1529 (define_insn "altivec_vpkpx"
1530 [(set (match_operand:V8HI 0 "register_operand" "=v")
1531 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
1532 (match_operand:V4SI 2 "register_operand" "v")]
1537 if (VECTOR_ELT_ORDER_BIG)
1538 return \"vpkpx %0,%1,%2\";
1540 return \"vpkpx %0,%2,%1\";
1542 [(set_attr "type" "vecperm")])
;; Pack signed -> signed with saturation.
1544 (define_insn "altivec_vpks<VI_char>ss"
1545 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1546 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1547 (match_operand:VP 2 "register_operand" "v")]
1548 UNSPEC_VPACK_SIGN_SIGN_SAT))]
1552 if (VECTOR_ELT_ORDER_BIG)
1553 return \"vpks<VI_char>ss %0,%1,%2\";
1555 return \"vpks<VI_char>ss %0,%2,%1\";
1557 [(set_attr "type" "vecperm")])
;; Pack signed -> unsigned with saturation.
1559 (define_insn "altivec_vpks<VI_char>us"
1560 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1561 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1562 (match_operand:VP 2 "register_operand" "v")]
1563 UNSPEC_VPACK_SIGN_UNS_SAT))]
1567 if (VECTOR_ELT_ORDER_BIG)
1568 return \"vpks<VI_char>us %0,%1,%2\";
1570 return \"vpks<VI_char>us %0,%2,%1\";
1572 [(set_attr "type" "vecperm")])
;; Pack unsigned -> unsigned with saturation.
1574 (define_insn "altivec_vpku<VI_char>us"
1575 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1576 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1577 (match_operand:VP 2 "register_operand" "v")]
1578 UNSPEC_VPACK_UNS_UNS_SAT))]
1582 if (VECTOR_ELT_ORDER_BIG)
1583 return \"vpku<VI_char>us %0,%1,%2\";
1585 return \"vpku<VI_char>us %0,%2,%1\";
1587 [(set_attr "type" "vecperm")])
;; Pack unsigned modulo (truncating, no saturation).
1589 (define_insn "altivec_vpku<VI_char>um"
1590 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1591 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1592 (match_operand:VP 2 "register_operand" "v")]
1593 UNSPEC_VPACK_UNS_UNS_MOD))]
1597 if (VECTOR_ELT_ORDER_BIG)
1598 return \"vpku<VI_char>um %0,%1,%2\";
1600 return \"vpku<VI_char>um %0,%2,%1\";
1602 [(set_attr "type" "vecperm")])
;; _direct variant keys the swap off raw BYTES_BIG_ENDIAN rather than
;; the -maltivec=be element-order view.
1604 (define_insn "altivec_vpku<VI_char>um_direct"
1605 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1606 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1607 (match_operand:VP 2 "register_operand" "v")]
1608 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT))]
1612 if (BYTES_BIG_ENDIAN)
1613 return \"vpku<VI_char>um %0,%1,%2\";
1615 return \"vpku<VI_char>um %0,%2,%1\";
1617 [(set_attr "type" "vecperm")])
;; Element-wise rotate left; shift amount comes from the matching
;; element of operand 2 (per-element rotate, endian-neutral).
1619 (define_insn "*altivec_vrl<VI_char>"
1620 [(set (match_operand:VI2 0 "register_operand" "=v")
1621 (rotate:VI2 (match_operand:VI2 1 "register_operand" "v")
1622 (match_operand:VI2 2 "register_operand" "v")))]
1624 "vrl<VI_char> %0,%1,%2"
1625 [(set_attr "type" "vecsimple")])
;; Whole-register shifts expressed as unspecs (bit shift vsl/vsr and
;; byte shift vslo/vsro); templates not visible in this extract.
1627 (define_insn "altivec_vsl"
1628 [(set (match_operand:V4SI 0 "register_operand" "=v")
1629 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1630 (match_operand:V4SI 2 "register_operand" "v")]
1634 [(set_attr "type" "vecperm")])
1636 (define_insn "altivec_vslo"
1637 [(set (match_operand:V4SI 0 "register_operand" "=v")
1638 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1639 (match_operand:V4SI 2 "register_operand" "v")]
1643 [(set_attr "type" "vecperm")])
;; NOTE(review): the define_insn headers for the next two V16QI
;; patterns are not visible in this extract; names and unspecs unknown
;; here — confirm against upstream altivec.md.
1646 [(set (match_operand:V16QI 0 "register_operand" "=v")
1647 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1648 (match_operand:V16QI 2 "register_operand" "v")]
1652 [(set_attr "type" "vecsimple")])
1655 [(set (match_operand:V16QI 0 "register_operand" "=v")
1656 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1657 (match_operand:V16QI 2 "register_operand" "v")]
1661 [(set_attr "type" "vecsimple")])
;; Element-wise logical shift left.
1663 (define_insn "*altivec_vsl<VI_char>"
1664 [(set (match_operand:VI2 0 "register_operand" "=v")
1665 (ashift:VI2 (match_operand:VI2 1 "register_operand" "v")
1666 (match_operand:VI2 2 "register_operand" "v")))]
1668 "vsl<VI_char> %0,%1,%2"
1669 [(set_attr "type" "vecsimple")])
;; Element-wise logical shift right.
1671 (define_insn "*altivec_vsr<VI_char>"
1672 [(set (match_operand:VI2 0 "register_operand" "=v")
1673 (lshiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1674 (match_operand:VI2 2 "register_operand" "v")))]
1676 "vsr<VI_char> %0,%1,%2"
1677 [(set_attr "type" "vecsimple")])
;; Element-wise arithmetic shift right.
1679 (define_insn "*altivec_vsra<VI_char>"
1680 [(set (match_operand:VI2 0 "register_operand" "=v")
1681 (ashiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1682 (match_operand:VI2 2 "register_operand" "v")))]
1684 "vsra<VI_char> %0,%1,%2"
1685 [(set_attr "type" "vecsimple")])
;; Whole-register right shifts (bit shift vsr, byte shift vsro) as
;; unspecs; templates not visible in this extract.
1687 (define_insn "altivec_vsr"
1688 [(set (match_operand:V4SI 0 "register_operand" "=v")
1689 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1690 (match_operand:V4SI 2 "register_operand" "v")]
1694 [(set_attr "type" "vecperm")])
1696 (define_insn "altivec_vsro"
1697 [(set (match_operand:V4SI 0 "register_operand" "=v")
1698 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1699 (match_operand:V4SI 2 "register_operand" "v")]
1703 [(set_attr "type" "vecperm")])
;; Saturating sum-across patterns.  All of them also set VSCR (the
;; saturation bit), modeled by the parallel set of VSCR_REGNO.
1705 (define_insn "altivec_vsum4ubs"
1706 [(set (match_operand:V4SI 0 "register_operand" "=v")
1707 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")
1708 (match_operand:V4SI 2 "register_operand" "v")]
1710 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1713 [(set_attr "type" "veccomplex")])
1715 (define_insn "altivec_vsum4s<VI_char>s"
1716 [(set (match_operand:V4SI 0 "register_operand" "=v")
1717 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1718 (match_operand:V4SI 2 "register_operand" "v")]
1720 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1722 "vsum4s<VI_char>s %0,%1,%2"
1723 [(set_attr "type" "veccomplex")])
1725 ;; FIXME: For the following two patterns, the scratch should only be
1726 ;; allocated for !VECTOR_ELT_ORDER_BIG, and the instructions should
1727 ;; be emitted separately.
;; On LE the accumulator words sit in the "wrong" element positions,
;; so a 3-insn sequence (rotate input, sum, rotate result) is emitted;
;; hence the conditional length attribute below (4 vs. 12 bytes).
1728 (define_insn "altivec_vsum2sws"
1729 [(set (match_operand:V4SI 0 "register_operand" "=v")
1730 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1731 (match_operand:V4SI 2 "register_operand" "v")]
1733 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))
1734 (clobber (match_scratch:V4SI 3 "=v"))]
1737 if (VECTOR_ELT_ORDER_BIG)
1738 return "vsum2sws %0,%1,%2";
1740 return "vsldoi %3,%2,%2,12\n\tvsum2sws %3,%1,%3\n\tvsldoi %0,%3,%3,4";
1742 [(set_attr "type" "veccomplex")
1743 (set (attr "length")
1745 (match_test "VECTOR_ELT_ORDER_BIG")
1747 (const_string "12")))])
1749 (define_insn "altivec_vsumsws"
1750 [(set (match_operand:V4SI 0 "register_operand" "=v")
1751 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1752 (match_operand:V4SI 2 "register_operand" "v")]
1754 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))
1755 (clobber (match_scratch:V4SI 3 "=v"))]
1758 if (VECTOR_ELT_ORDER_BIG)
1759 return "vsumsws %0,%1,%2";
1761 return "vspltw %3,%2,0\n\tvsumsws %3,%1,%3\n\tvsldoi %0,%3,%3,12";
1763 [(set_attr "type" "veccomplex")
1764 (set (attr "length")
1766 (match_test "(VECTOR_ELT_ORDER_BIG)")
1768 (const_string "12")))])
;; Direct form: single vsumsws with no endian fixup.
1770 (define_insn "altivec_vsumsws_direct"
1771 [(set (match_operand:V4SI 0 "register_operand" "=v")
1772 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1773 (match_operand:V4SI 2 "register_operand" "v")]
1774 UNSPEC_VSUMSWS_DIRECT))
1775 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1778 [(set_attr "type" "veccomplex")])
;; vspltb: splat byte element 2 of operand 1 across the result.
;; For LE with -maltivec=be the index is mirrored (15 - n) in the RTL;
;; the internal insn mirrors it again for any LE target, so the two
;; adjustments cancel for -maltivec=be and a single mirror remains for
;; true LE.
1780 (define_expand "altivec_vspltb"
1781 [(use (match_operand:V16QI 0 "register_operand" ""))
1782 (use (match_operand:V16QI 1 "register_operand" ""))
1783 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1789 /* Special handling for LE with -maltivec=be. We have to reflect
1790 the actual selected index for the splat in the RTL. */
1791 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1792 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
1794 v = gen_rtvec (1, operands[2]);
1795 x = gen_rtx_VEC_SELECT (QImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1796 x = gen_rtx_VEC_DUPLICATE (V16QImode, x);
1797 emit_insn (gen_rtx_SET (operands[0], x));
1801 (define_insn "*altivec_vspltb_internal"
1802 [(set (match_operand:V16QI 0 "register_operand" "=v")
1803 (vec_duplicate:V16QI
1804 (vec_select:QI (match_operand:V16QI 1 "register_operand" "v")
1806 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
1809 /* For true LE, this adjusts the selected index. For LE with
1810 -maltivec=be, this reverses what was done in the define_expand
1811 because the instruction already has big-endian bias. */
1812 if (!BYTES_BIG_ENDIAN)
1813 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
1815 return "vspltb %0,%1,%2";
1817 [(set_attr "type" "vecperm")])
;; Direct form: index passed straight through, no endian adjustment.
1819 (define_insn "altivec_vspltb_direct"
1820 [(set (match_operand:V16QI 0 "register_operand" "=v")
1821 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1822 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1823 UNSPEC_VSPLT_DIRECT))]
1826 [(set_attr "type" "vecperm")])
;; vsplth: splat halfword element across a V8HI.  Index mirroring
;; (7 - n) follows the same two-stage scheme as altivec_vspltb above.
1828 (define_expand "altivec_vsplth"
1829 [(use (match_operand:V8HI 0 "register_operand" ""))
1830 (use (match_operand:V8HI 1 "register_operand" ""))
1831 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1837 /* Special handling for LE with -maltivec=be. We have to reflect
1838 the actual selected index for the splat in the RTL. */
1839 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1840 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
1842 v = gen_rtvec (1, operands[2]);
1843 x = gen_rtx_VEC_SELECT (HImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1844 x = gen_rtx_VEC_DUPLICATE (V8HImode, x);
1845 emit_insn (gen_rtx_SET (operands[0], x));
1849 (define_insn "*altivec_vsplth_internal"
1850 [(set (match_operand:V8HI 0 "register_operand" "=v")
1852 (vec_select:HI (match_operand:V8HI 1 "register_operand" "v")
1854 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
1857 /* For true LE, this adjusts the selected index. For LE with
1858 -maltivec=be, this reverses what was done in the define_expand
1859 because the instruction already has big-endian bias. */
1860 if (!BYTES_BIG_ENDIAN)
1861 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
1863 return "vsplth %0,%1,%2";
1865 [(set_attr "type" "vecperm")])
;; Direct form: index passed straight through, no endian adjustment.
1867 (define_insn "altivec_vsplth_direct"
1868 [(set (match_operand:V8HI 0 "register_operand" "=v")
1869 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1870 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1871 UNSPEC_VSPLT_DIRECT))]
1874 [(set_attr "type" "vecperm")])
;; vspltw: splat word element across a V4SI.  Index mirroring (3 - n)
;; follows the same two-stage scheme as altivec_vspltb above.
1876 (define_expand "altivec_vspltw"
1877 [(use (match_operand:V4SI 0 "register_operand" ""))
1878 (use (match_operand:V4SI 1 "register_operand" ""))
1879 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1885 /* Special handling for LE with -maltivec=be. We have to reflect
1886 the actual selected index for the splat in the RTL. */
1887 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1888 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1890 v = gen_rtvec (1, operands[2]);
1891 x = gen_rtx_VEC_SELECT (SImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1892 x = gen_rtx_VEC_DUPLICATE (V4SImode, x);
1893 emit_insn (gen_rtx_SET (operands[0], x));
1897 (define_insn "*altivec_vspltw_internal"
1898 [(set (match_operand:V4SI 0 "register_operand" "=v")
1900 (vec_select:SI (match_operand:V4SI 1 "register_operand" "v")
1902 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
1905 /* For true LE, this adjusts the selected index. For LE with
1906 -maltivec=be, this reverses what was done in the define_expand
1907 because the instruction already has big-endian bias. */
1908 if (!BYTES_BIG_ENDIAN)
1909 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1911 return "vspltw %0,%1,%2";
1913 [(set_attr "type" "vecperm")])
;; Direct form: index passed straight through, no endian adjustment.
1915 (define_insn "altivec_vspltw_direct"
1916 [(set (match_operand:V4SI 0 "register_operand" "=v")
1917 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1918 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1919 UNSPEC_VSPLT_DIRECT))]
1922 [(set_attr "type" "vecperm")])
;; vspltsf: splat a float word across a V4SF.  Reuses the integer
;; vspltw instruction; index mirroring (3 - n) follows the same
;; two-stage scheme as the integer splats above.
1924 (define_expand "altivec_vspltsf"
1925 [(use (match_operand:V4SF 0 "register_operand" ""))
1926 (use (match_operand:V4SF 1 "register_operand" ""))
1927 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1933 /* Special handling for LE with -maltivec=be. We have to reflect
1934 the actual selected index for the splat in the RTL. */
1935 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1936 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1938 v = gen_rtvec (1, operands[2]);
1939 x = gen_rtx_VEC_SELECT (SFmode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1940 x = gen_rtx_VEC_DUPLICATE (V4SFmode, x);
1941 emit_insn (gen_rtx_SET (operands[0], x));
1945 (define_insn "*altivec_vspltsf_internal"
1946 [(set (match_operand:V4SF 0 "register_operand" "=v")
1948 (vec_select:SF (match_operand:V4SF 1 "register_operand" "v")
1950 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
1951 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1953 /* For true LE, this adjusts the selected index. For LE with
1954 -maltivec=be, this reverses what was done in the define_expand
1955 because the instruction already has big-endian bias. */
1956 if (!BYTES_BIG_ENDIAN)
1957 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1959 return "vspltw %0,%1,%2";
1961 [(set_attr "type" "vecperm")])
;; Splat a 5-bit signed immediate into every element (vspltisb/h/w).
1963 (define_insn "altivec_vspltis<VI_char>"
1964 [(set (match_operand:VI 0 "register_operand" "=v")
1966 (match_operand:QI 1 "s5bit_cint_operand" "i")))]
1968 "vspltis<VI_char> %0,%1"
1969 [(set_attr "type" "vecperm")])
;; Round each V4SF element toward zero (RTL "fix"); output template
;; (vrfiz) is on a line not visible in this extract.
1971 (define_insn "*altivec_vrfiz"
1972 [(set (match_operand:V4SF 0 "register_operand" "=v")
1973 (fix:V4SF (match_operand:V4SF 1 "register_operand" "v")))]
1974 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1976 [(set_attr "type" "vecfloat")])
;; vperm: full byte permute of the 32-byte concatenation of operands
;; 1 and 2 under the control vector in operand 3.  On little-endian
;; the expander rewrites the operands/selector via
;; altivec_expand_vec_perm_le before matching.
1978 (define_expand "altivec_vperm_<mode>"
1979 [(set (match_operand:VM 0 "register_operand" "")
1980 (unspec:VM [(match_operand:VM 1 "register_operand" "")
1981 (match_operand:VM 2 "register_operand" "")
1982 (match_operand:V16QI 3 "register_operand" "")]
1986 if (!VECTOR_ELT_ORDER_BIG)
1988 altivec_expand_vec_perm_le (operands);
1993 ;; Slightly prefer vperm, since the target does not overlap the source
1994 (define_insn "*altivec_vperm_<mode>_internal"
1995 [(set (match_operand:VM 0 "register_operand" "=v,?wo")
1996 (unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
1997 (match_operand:VM 2 "register_operand" "v,0")
1998 (match_operand:V16QI 3 "register_operand" "v,wo")]
2004 [(set_attr "type" "vecperm")
2005 (set_attr "length" "4")])
;; Mixed-mode permute: V8HI inputs, V16QI result.
2007 (define_insn "altivec_vperm_v8hiv16qi"
2008 [(set (match_operand:V16QI 0 "register_operand" "=v,?wo")
2009 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "v,wo")
2010 (match_operand:V8HI 2 "register_operand" "v,0")
2011 (match_operand:V16QI 3 "register_operand" "v,wo")]
2017 [(set_attr "type" "vecperm")
2018 (set_attr "length" "4")])
;; Unsigned-flavored permute; same LE rewrite as the signed expander.
2020 (define_expand "altivec_vperm_<mode>_uns"
2021 [(set (match_operand:VM 0 "register_operand" "")
2022 (unspec:VM [(match_operand:VM 1 "register_operand" "")
2023 (match_operand:VM 2 "register_operand" "")
2024 (match_operand:V16QI 3 "register_operand" "")]
2028 if (!VECTOR_ELT_ORDER_BIG)
2030 altivec_expand_vec_perm_le (operands);
2035 (define_insn "*altivec_vperm_<mode>_uns_internal"
2036 [(set (match_operand:VM 0 "register_operand" "=v,?wo")
2037 (unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
2038 (match_operand:VM 2 "register_operand" "v,0")
2039 (match_operand:V16QI 3 "register_operand" "v,wo")]
2045 [(set_attr "type" "vecperm")
2046 (set_attr "length" "4")])
;; Generic vec_perm optab entry points.  Note this one keys off raw
;; BYTES_BIG_ENDIAN rather than the -maltivec=be element-order view.
2048 (define_expand "vec_permv16qi"
2049 [(set (match_operand:V16QI 0 "register_operand" "")
2050 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "")
2051 (match_operand:V16QI 2 "register_operand" "")
2052 (match_operand:V16QI 3 "register_operand" "")]
2056 if (!BYTES_BIG_ENDIAN) {
2057 altivec_expand_vec_perm_le (operands);
;; Constant-selector permute; succeeds only if
;; altivec_expand_vec_perm_const can emit a specialized sequence.
2062 (define_expand "vec_perm_constv16qi"
2063 [(match_operand:V16QI 0 "register_operand" "")
2064 (match_operand:V16QI 1 "register_operand" "")
2065 (match_operand:V16QI 2 "register_operand" "")
2066 (match_operand:V16QI 3 "" "")]
2069 if (altivec_expand_vec_perm_const (operands))
;; ISA 3.0 reversed permute (vpermr/xxpermr alternatives).
2075 (define_insn "*altivec_vpermr_<mode>_internal"
2076 [(set (match_operand:VM 0 "register_operand" "=v,?wo")
2077 (unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
2078 (match_operand:VM 2 "register_operand" "v,0")
2079 (match_operand:V16QI 3 "register_operand" "v,wo")]
2084 xxpermr %x0,%x1,%x3"
2085 [(set_attr "type" "vecperm")
2086 (set_attr "length" "4")])
;; V4SF rounding, int<->float conversion, and estimate instructions.
;; Output templates and unspec codes sit on lines not visible in this
;; extract; the inline comments ("; ceil", "; floor") are original.
2088 (define_insn "altivec_vrfip" ; ceil
2089 [(set (match_operand:V4SF 0 "register_operand" "=v")
2090 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2094 [(set_attr "type" "vecfloat")])
;; Round to nearest.
2096 (define_insn "altivec_vrfin"
2097 [(set (match_operand:V4SF 0 "register_operand" "=v")
2098 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2102 [(set_attr "type" "vecfloat")])
2104 (define_insn "*altivec_vrfim" ; floor
2105 [(set (match_operand:V4SF 0 "register_operand" "=v")
2106 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2110 [(set_attr "type" "vecfloat")])
;; Convert unsigned/signed V4SI to V4SF with a scale immediate.
2112 (define_insn "altivec_vcfux"
2113 [(set (match_operand:V4SF 0 "register_operand" "=v")
2114 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2115 (match_operand:QI 2 "immediate_operand" "i")]
2119 [(set_attr "type" "vecfloat")])
2121 (define_insn "altivec_vcfsx"
2122 [(set (match_operand:V4SF 0 "register_operand" "=v")
2123 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2124 (match_operand:QI 2 "immediate_operand" "i")]
2128 [(set_attr "type" "vecfloat")])
;; Saturating float->int conversions; both also set VSCR.
2130 (define_insn "altivec_vctuxs"
2131 [(set (match_operand:V4SI 0 "register_operand" "=v")
2132 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2133 (match_operand:QI 2 "immediate_operand" "i")]
2135 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2138 [(set_attr "type" "vecfloat")])
2140 (define_insn "altivec_vctsxs"
2141 [(set (match_operand:V4SI 0 "register_operand" "=v")
2142 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2143 (match_operand:QI 2 "immediate_operand" "i")]
2145 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2148 [(set_attr "type" "vecfloat")])
;; log2 / 2**x / reciprocal-sqrt / reciprocal estimates.
2150 (define_insn "altivec_vlogefp"
2151 [(set (match_operand:V4SF 0 "register_operand" "=v")
2152 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2156 [(set_attr "type" "vecfloat")])
2158 (define_insn "altivec_vexptefp"
2159 [(set (match_operand:V4SF 0 "register_operand" "=v")
2160 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2164 [(set_attr "type" "vecfloat")])
2166 (define_insn "*altivec_vrsqrtefp"
2167 [(set (match_operand:V4SF 0 "register_operand" "=v")
2168 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2170 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2172 [(set_attr "type" "vecfloat")])
2174 (define_insn "altivec_vrefp"
2175 [(set (match_operand:V4SF 0 "register_operand" "=v")
2176 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2178 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2180 [(set_attr "type" "vecfloat")])
;; copysign for V4SF: build a 0x80000000 sign-bit mask in each word,
;; then use vector select to take the sign bit from operand 2 and the
;; magnitude bits from operand 1.
2182 (define_expand "altivec_copysign_v4sf3"
2183 [(use (match_operand:V4SF 0 "register_operand" ""))
2184 (use (match_operand:V4SF 1 "register_operand" ""))
2185 (use (match_operand:V4SF 2 "register_operand" ""))]
2186 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2189 rtx mask = gen_reg_rtx (V4SImode);
2190 rtvec v = rtvec_alloc (4);
;; Only bit 31 set: isolates the IEEE single-precision sign bit.
2191 unsigned HOST_WIDE_INT mask_val = ((unsigned HOST_WIDE_INT)1) << 31;
2193 RTVEC_ELT (v, 0) = GEN_INT (mask_val);
2194 RTVEC_ELT (v, 1) = GEN_INT (mask_val);
2195 RTVEC_ELT (v, 2) = GEN_INT (mask_val);
2196 RTVEC_ELT (v, 3) = GEN_INT (mask_val);
2198 emit_insn (gen_vec_initv4si (mask, gen_rtx_PARALLEL (V4SImode, v)));
2199 emit_insn (gen_vector_select_v4sf (operands[0], operands[1], operands[2],
2200 gen_lowpart (V4SFmode, mask)))
;; vsldoi: shift the 32-byte concatenation of operands 1 and 2 left
;; by the immediate byte count in operand 3, keeping the high 16 bytes.
2204 (define_insn "altivec_vsldoi_<mode>"
2205 [(set (match_operand:VM 0 "register_operand" "=v")
2206 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
2207 (match_operand:VM 2 "register_operand" "v")
2208 (match_operand:QI 3 "immediate_operand" "i")]
2211 "vsldoi %0,%1,%2,%3"
2212 [(set_attr "type" "vecperm")])
;; Sign-extending unpack: widen half of a narrow vector (<VP_small>)
;; into a full VP vector.  "High"/"low" follow the element-order view:
;; on true LE the hi and lo instructions swap roles.
2214 (define_insn "altivec_vupkhs<VU_char>"
2215 [(set (match_operand:VP 0 "register_operand" "=v")
2216 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2217 UNSPEC_VUNPACK_HI_SIGN))]
2220 if (VECTOR_ELT_ORDER_BIG)
2221 return "vupkhs<VU_char> %0,%1";
2223 return "vupkls<VU_char> %0,%1";
2225 [(set_attr "type" "vecperm")])
;; Direct form: always the hi-unpack instruction, no endian swap.
2227 (define_insn "*altivec_vupkhs<VU_char>_direct"
2228 [(set (match_operand:VP 0 "register_operand" "=v")
2229 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2230 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2232 "vupkhs<VU_char> %0,%1"
2233 [(set_attr "type" "vecperm")])
2235 (define_insn "altivec_vupkls<VU_char>"
2236 [(set (match_operand:VP 0 "register_operand" "=v")
2237 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2238 UNSPEC_VUNPACK_LO_SIGN))]
2241 if (VECTOR_ELT_ORDER_BIG)
2242 return "vupkls<VU_char> %0,%1";
2244 return "vupkhs<VU_char> %0,%1";
2246 [(set_attr "type" "vecperm")])
;; Direct form: always the lo-unpack instruction, no endian swap.
2248 (define_insn "*altivec_vupkls<VU_char>_direct"
2249 [(set (match_operand:VP 0 "register_operand" "=v")
2250 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2251 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
2253 "vupkls<VU_char> %0,%1"
2254 [(set_attr "type" "vecperm")])
;; Pixel unpack (V8HI -> V4SI), hi/lo swapped on true LE as above.
2256 (define_insn "altivec_vupkhpx"
2257 [(set (match_operand:V4SI 0 "register_operand" "=v")
2258 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2262 if (VECTOR_ELT_ORDER_BIG)
2263 return "vupkhpx %0,%1";
2265 return "vupklpx %0,%1";
2267 [(set_attr "type" "vecperm")])
2269 (define_insn "altivec_vupklpx"
2270 [(set (match_operand:V4SI 0 "register_operand" "=v")
2271 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2275 if (VECTOR_ELT_ORDER_BIG)
2276 return "vupklpx %0,%1";
2278 return "vupkhpx %0,%1";
2280 [(set_attr "type" "vecperm")])
2282 ;; Compare vectors producing a vector result and a predicate, setting CR6 to
2283 ;; indicate a combined status
2284 (define_insn "*altivec_vcmpequ<VI_char>_p"
2285 [(set (reg:CC CR6_REGNO)
2286 (unspec:CC [(eq:CC (match_operand:VI2 1 "register_operand" "v")
2287 (match_operand:VI2 2 "register_operand" "v"))]
2289 (set (match_operand:VI2 0 "register_operand" "=v")
2290 (eq:VI2 (match_dup 1)
2293 "vcmpequ<VI_char>. %0,%1,%2"
2294 [(set_attr "type" "veccmpfx")])
2296 (define_insn "*altivec_vcmpgts<VI_char>_p"
2297 [(set (reg:CC CR6_REGNO)
2298 (unspec:CC [(gt:CC (match_operand:VI2 1 "register_operand" "v")
2299 (match_operand:VI2 2 "register_operand" "v"))]
2301 (set (match_operand:VI2 0 "register_operand" "=v")
2302 (gt:VI2 (match_dup 1)
2305 "vcmpgts<VI_char>. %0,%1,%2"
2306 [(set_attr "type" "veccmpfx")])
2308 (define_insn "*altivec_vcmpgtu<VI_char>_p"
2309 [(set (reg:CC CR6_REGNO)
2310 (unspec:CC [(gtu:CC (match_operand:VI2 1 "register_operand" "v")
2311 (match_operand:VI2 2 "register_operand" "v"))]
2313 (set (match_operand:VI2 0 "register_operand" "=v")
2314 (gtu:VI2 (match_dup 1)
2317 "vcmpgtu<VI_char>. %0,%1,%2"
2318 [(set_attr "type" "veccmpfx")])
2320 (define_insn "*altivec_vcmpeqfp_p"
2321 [(set (reg:CC CR6_REGNO)
2322 (unspec:CC [(eq:CC (match_operand:V4SF 1 "register_operand" "v")
2323 (match_operand:V4SF 2 "register_operand" "v"))]
2325 (set (match_operand:V4SF 0 "register_operand" "=v")
2326 (eq:V4SF (match_dup 1)
2328 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2329 "vcmpeqfp. %0,%1,%2"
2330 [(set_attr "type" "veccmp")])
2332 (define_insn "*altivec_vcmpgtfp_p"
2333 [(set (reg:CC CR6_REGNO)
2334 (unspec:CC [(gt:CC (match_operand:V4SF 1 "register_operand" "v")
2335 (match_operand:V4SF 2 "register_operand" "v"))]
2337 (set (match_operand:V4SF 0 "register_operand" "=v")
2338 (gt:V4SF (match_dup 1)
2340 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2341 "vcmpgtfp. %0,%1,%2"
2342 [(set_attr "type" "veccmp")])
2344 (define_insn "*altivec_vcmpgefp_p"
2345 [(set (reg:CC CR6_REGNO)
2346 (unspec:CC [(ge:CC (match_operand:V4SF 1 "register_operand" "v")
2347 (match_operand:V4SF 2 "register_operand" "v"))]
2349 (set (match_operand:V4SF 0 "register_operand" "=v")
2350 (ge:V4SF (match_dup 1)
2352 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2353 "vcmpgefp. %0,%1,%2"
2354 [(set_attr "type" "veccmp")])
2356 (define_insn "altivec_vcmpbfp_p"
2357 [(set (reg:CC CR6_REGNO)
2358 (unspec:CC [(match_operand:V4SF 1 "register_operand" "v")
2359 (match_operand:V4SF 2 "register_operand" "v")]
2361 (set (match_operand:V4SF 0 "register_operand" "=v")
2362 (unspec:V4SF [(match_dup 1)
2365 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)"
2367 [(set_attr "type" "veccmp")])
2369 (define_insn "altivec_mtvscr"
2370 [(set (reg:SI VSCR_REGNO)
2372 [(match_operand:V4SI 0 "register_operand" "v")] UNSPECV_MTVSCR))]
2375 [(set_attr "type" "vecsimple")])
2377 (define_insn "altivec_mfvscr"
2378 [(set (match_operand:V8HI 0 "register_operand" "=v")
2379 (unspec_volatile:V8HI [(reg:SI VSCR_REGNO)] UNSPECV_MFVSCR))]
2382 [(set_attr "type" "vecsimple")])
2384 (define_insn "altivec_dssall"
2385 [(unspec_volatile [(const_int 0)] UNSPECV_DSSALL)]
2388 [(set_attr "type" "vecsimple")])
2390 (define_insn "altivec_dss"
2391 [(unspec_volatile [(match_operand:QI 0 "immediate_operand" "i")]
2395 [(set_attr "type" "vecsimple")])
2397 (define_insn "altivec_dst"
2398 [(unspec [(match_operand 0 "register_operand" "b")
2399 (match_operand:SI 1 "register_operand" "r")
2400 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DST)]
2401 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2403 [(set_attr "type" "vecsimple")])
2405 (define_insn "altivec_dstt"
2406 [(unspec [(match_operand 0 "register_operand" "b")
2407 (match_operand:SI 1 "register_operand" "r")
2408 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTT)]
2409 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2411 [(set_attr "type" "vecsimple")])
2413 (define_insn "altivec_dstst"
2414 [(unspec [(match_operand 0 "register_operand" "b")
2415 (match_operand:SI 1 "register_operand" "r")
2416 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTST)]
2417 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2419 [(set_attr "type" "vecsimple")])
2421 (define_insn "altivec_dststt"
2422 [(unspec [(match_operand 0 "register_operand" "b")
2423 (match_operand:SI 1 "register_operand" "r")
2424 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTSTT)]
2425 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2427 [(set_attr "type" "vecsimple")])
2429 (define_expand "altivec_lvsl"
2430 [(use (match_operand:V16QI 0 "register_operand" ""))
2431 (use (match_operand:V16QI 1 "memory_operand" ""))]
2434 if (VECTOR_ELT_ORDER_BIG)
2435 emit_insn (gen_altivec_lvsl_direct (operands[0], operands[1]));
2439 rtx mask, perm[16], constv, vperm;
2440 mask = gen_reg_rtx (V16QImode);
2441 emit_insn (gen_altivec_lvsl_direct (mask, operands[1]));
2442 for (i = 0; i < 16; ++i)
2443 perm[i] = GEN_INT (i);
2444 constv = gen_rtx_CONST_VECTOR (V16QImode, gen_rtvec_v (16, perm));
2445 constv = force_reg (V16QImode, constv);
2446 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2448 emit_insn (gen_rtx_SET (operands[0], vperm));
2453 (define_insn "altivec_lvsl_direct"
2454 [(set (match_operand:V16QI 0 "register_operand" "=v")
2455 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2459 [(set_attr "type" "vecload")])
2461 (define_expand "altivec_lvsr"
2462 [(use (match_operand:V16QI 0 "register_operand" ""))
2463 (use (match_operand:V16QI 1 "memory_operand" ""))]
2466 if (VECTOR_ELT_ORDER_BIG)
2467 emit_insn (gen_altivec_lvsr_direct (operands[0], operands[1]));
2471 rtx mask, perm[16], constv, vperm;
2472 mask = gen_reg_rtx (V16QImode);
2473 emit_insn (gen_altivec_lvsr_direct (mask, operands[1]));
2474 for (i = 0; i < 16; ++i)
2475 perm[i] = GEN_INT (i);
2476 constv = gen_rtx_CONST_VECTOR (V16QImode, gen_rtvec_v (16, perm));
2477 constv = force_reg (V16QImode, constv);
2478 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2480 emit_insn (gen_rtx_SET (operands[0], vperm));
2485 (define_insn "altivec_lvsr_direct"
2486 [(set (match_operand:V16QI 0 "register_operand" "=v")
2487 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2491 [(set_attr "type" "vecload")])
2493 (define_expand "build_vector_mask_for_load"
2494 [(set (match_operand:V16QI 0 "register_operand" "")
2495 (unspec:V16QI [(match_operand 1 "memory_operand" "")] UNSPEC_LVSR))]
2502 gcc_assert (GET_CODE (operands[1]) == MEM);
2504 addr = XEXP (operands[1], 0);
2505 temp = gen_reg_rtx (GET_MODE (addr));
2506 emit_insn (gen_rtx_SET (temp, gen_rtx_NEG (GET_MODE (addr), addr)));
2507 emit_insn (gen_altivec_lvsr (operands[0],
2508 replace_equiv_address (operands[1], temp)));
2512 ;; Parallel some of the LVE* and STV* patterns with unspecs, because some
2513 ;; have identical RTL but different instructions -- and gcc gets confused.
2515 (define_expand "altivec_lve<VI_char>x"
2517 [(set (match_operand:VI 0 "register_operand" "=v")
2518 (match_operand:VI 1 "memory_operand" "Z"))
2519 (unspec [(const_int 0)] UNSPEC_LVE)])]
2522 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2524 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_LVE);
2529 (define_insn "*altivec_lve<VI_char>x_internal"
2531 [(set (match_operand:VI 0 "register_operand" "=v")
2532 (match_operand:VI 1 "memory_operand" "Z"))
2533 (unspec [(const_int 0)] UNSPEC_LVE)])]
2535 "lve<VI_char>x %0,%y1"
2536 [(set_attr "type" "vecload")])
2538 (define_insn "*altivec_lvesfx"
2540 [(set (match_operand:V4SF 0 "register_operand" "=v")
2541 (match_operand:V4SF 1 "memory_operand" "Z"))
2542 (unspec [(const_int 0)] UNSPEC_LVE)])]
2545 [(set_attr "type" "vecload")])
2547 (define_expand "altivec_lvxl_<mode>"
2549 [(set (match_operand:VM2 0 "register_operand" "=v")
2550 (match_operand:VM2 1 "memory_operand" "Z"))
2551 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
2554 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2556 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_SET_VSCR);
2561 (define_insn "*altivec_lvxl_<mode>_internal"
2563 [(set (match_operand:VM2 0 "register_operand" "=v")
2564 (match_operand:VM2 1 "memory_operand" "Z"))
2565 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
2568 [(set_attr "type" "vecload")])
2570 ; This version of lvx is used only in cases where we need to force an lvx
2571 ; over any other load, and we don't care about losing CSE opportunities.
2572 ; Its primary use is for prologue register saves.
2573 (define_insn "altivec_lvx_<mode>_internal"
2575 [(set (match_operand:VM2 0 "register_operand" "=v")
2576 (match_operand:VM2 1 "memory_operand" "Z"))
2577 (unspec [(const_int 0)] UNSPEC_LVX)])]
2580 [(set_attr "type" "vecload")])
2582 ; The next two patterns embody what lvx should usually look like.
2583 (define_insn "altivec_lvx_<mode>_2op"
2584 [(set (match_operand:VM2 0 "register_operand" "=v")
2585 (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b")
2586 (match_operand:DI 2 "register_operand" "r"))
2588 "TARGET_ALTIVEC && TARGET_64BIT"
2590 [(set_attr "type" "vecload")])
2592 (define_insn "altivec_lvx_<mode>_1op"
2593 [(set (match_operand:VM2 0 "register_operand" "=v")
2594 (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r")
2596 "TARGET_ALTIVEC && TARGET_64BIT"
2598 [(set_attr "type" "vecload")])
2600 ; 32-bit versions of the above.
2601 (define_insn "altivec_lvx_<mode>_2op_si"
2602 [(set (match_operand:VM2 0 "register_operand" "=v")
2603 (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b")
2604 (match_operand:SI 2 "register_operand" "r"))
2606 "TARGET_ALTIVEC && TARGET_32BIT"
2608 [(set_attr "type" "vecload")])
2610 (define_insn "altivec_lvx_<mode>_1op_si"
2611 [(set (match_operand:VM2 0 "register_operand" "=v")
2612 (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r")
2614 "TARGET_ALTIVEC && TARGET_32BIT"
2616 [(set_attr "type" "vecload")])
2618 ; This version of stvx is used only in cases where we need to force an stvx
2619 ; over any other store, and we don't care about losing CSE opportunities.
2620 ; Its primary use is for epilogue register restores.
2621 (define_insn "altivec_stvx_<mode>_internal"
2623 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2624 (match_operand:VM2 1 "register_operand" "v"))
2625 (unspec [(const_int 0)] UNSPEC_STVX)])]
2628 [(set_attr "type" "vecstore")])
2630 ; The next two patterns embody what stvx should usually look like.
2631 (define_insn "altivec_stvx_<mode>_2op"
2632 [(set (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b")
2633 (match_operand:DI 2 "register_operand" "r"))
2635 (match_operand:VM2 0 "register_operand" "v"))]
2636 "TARGET_ALTIVEC && TARGET_64BIT"
2638 [(set_attr "type" "vecstore")])
2640 (define_insn "altivec_stvx_<mode>_1op"
2641 [(set (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r")
2643 (match_operand:VM2 0 "register_operand" "v"))]
2644 "TARGET_ALTIVEC && TARGET_64BIT"
2646 [(set_attr "type" "vecstore")])
2648 ; 32-bit versions of the above.
2649 (define_insn "altivec_stvx_<mode>_2op_si"
2650 [(set (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b")
2651 (match_operand:SI 2 "register_operand" "r"))
2653 (match_operand:VM2 0 "register_operand" "v"))]
2654 "TARGET_ALTIVEC && TARGET_32BIT"
2656 [(set_attr "type" "vecstore")])
2658 (define_insn "altivec_stvx_<mode>_1op_si"
2659 [(set (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r")
2661 (match_operand:VM2 0 "register_operand" "v"))]
2662 "TARGET_ALTIVEC && TARGET_32BIT"
2664 [(set_attr "type" "vecstore")])
2666 (define_expand "altivec_stvxl_<mode>"
2668 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2669 (match_operand:VM2 1 "register_operand" "v"))
2670 (unspec [(const_int 0)] UNSPEC_STVXL)])]
2673 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2675 altivec_expand_stvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVXL);
2680 (define_insn "*altivec_stvxl_<mode>_internal"
2682 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2683 (match_operand:VM2 1 "register_operand" "v"))
2684 (unspec [(const_int 0)] UNSPEC_STVXL)])]
2687 [(set_attr "type" "vecstore")])
2689 (define_expand "altivec_stve<VI_char>x"
2690 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
2691 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
2694 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2696 altivec_expand_stvex_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVE);
2701 (define_insn "*altivec_stve<VI_char>x_internal"
2702 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
2703 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
2705 "stve<VI_char>x %1,%y0"
2706 [(set_attr "type" "vecstore")])
2708 (define_insn "*altivec_stvesfx"
2709 [(set (match_operand:SF 0 "memory_operand" "=Z")
2710 (unspec:SF [(match_operand:V4SF 1 "register_operand" "v")] UNSPEC_STVE))]
2713 [(set_attr "type" "vecstore")])
2716 ;; xxlxor/vxor SCRATCH0,SCRATCH0,SCRATCH0
2717 ;; vsubu?m SCRATCH2,SCRATCH1,%1
2718 ;; vmaxs? %0,%1,SCRATCH2"
2719 (define_expand "abs<mode>2"
2720 [(set (match_dup 2) (match_dup 3))
2722 (minus:VI2 (match_dup 2)
2723 (match_operand:VI2 1 "register_operand" "v")))
2724 (set (match_operand:VI2 0 "register_operand" "=v")
2725 (smax:VI2 (match_dup 1) (match_dup 4)))]
2728 int i, n_elt = GET_MODE_NUNITS (<MODE>mode);
2729 rtvec v = rtvec_alloc (n_elt);
2731 /* Create an all 0 constant. */
2732 for (i = 0; i < n_elt; ++i)
2733 RTVEC_ELT (v, i) = const0_rtx;
2735 operands[2] = gen_reg_rtx (<MODE>mode);
2736 operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, v);
2737 operands[4] = gen_reg_rtx (<MODE>mode);
2741 ;; vspltisw SCRATCH1,-1
2742 ;; vslw SCRATCH2,SCRATCH1,SCRATCH1
2743 ;; vandc %0,%1,SCRATCH2
2744 (define_expand "altivec_absv4sf2"
2746 (vec_duplicate:V4SI (const_int -1)))
2748 (ashift:V4SI (match_dup 2) (match_dup 2)))
2749 (set (match_operand:V4SF 0 "register_operand" "=v")
2750 (and:V4SF (not:V4SF (subreg:V4SF (match_dup 3) 0))
2751 (match_operand:V4SF 1 "register_operand" "v")))]
2754 operands[2] = gen_reg_rtx (V4SImode);
2755 operands[3] = gen_reg_rtx (V4SImode);
2759 ;; vspltis? SCRATCH0,0
2760 ;; vsubs?s SCRATCH2,SCRATCH1,%1
2761 ;; vmaxs? %0,%1,SCRATCH2"
2762 (define_expand "altivec_abss_<mode>"
2763 [(set (match_dup 2) (vec_duplicate:VI (const_int 0)))
2764 (parallel [(set (match_dup 3)
2765 (unspec:VI [(match_dup 2)
2766 (match_operand:VI 1 "register_operand" "v")]
2768 (set (reg:SI VSCR_REGNO)
2769 (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))])
2770 (set (match_operand:VI 0 "register_operand" "=v")
2771 (smax:VI (match_dup 1) (match_dup 3)))]
2774 operands[2] = gen_reg_rtx (GET_MODE (operands[0]));
2775 operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
2778 (define_expand "reduc_plus_scal_<mode>"
2779 [(set (match_operand:<VI_scalar> 0 "register_operand" "=v")
2780 (unspec:VIshort [(match_operand:VIshort 1 "register_operand" "v")]
2781 UNSPEC_REDUC_PLUS))]
2784 rtx vzero = gen_reg_rtx (V4SImode);
2785 rtx vtmp1 = gen_reg_rtx (V4SImode);
2786 rtx vtmp2 = gen_reg_rtx (<MODE>mode);
2787 rtx dest = gen_lowpart (V4SImode, vtmp2);
2788 int elt = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (<MODE>mode) - 1 : 0;
2790 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2791 emit_insn (gen_altivec_vsum4s<VI_char>s (vtmp1, operands[1], vzero));
2792 emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
2793 rs6000_expand_vector_extract (operands[0], vtmp2, GEN_INT (elt));
2797 (define_insn "*p9_neg<mode>2"
2798 [(set (match_operand:VNEG 0 "altivec_register_operand" "=v")
2799 (neg:VNEG (match_operand:VNEG 1 "altivec_register_operand" "v")))]
2801 "vneg<VI_char> %0,%1"
2802 [(set_attr "type" "vecsimple")])
2804 (define_expand "neg<mode>2"
2805 [(set (match_operand:VI2 0 "register_operand" "")
2806 (neg:VI2 (match_operand:VI2 1 "register_operand" "")))]
2809 if (!TARGET_P9_VECTOR || (<MODE>mode != V4SImode && <MODE>mode != V2DImode))
2813 vzero = gen_reg_rtx (GET_MODE (operands[0]));
2814 emit_move_insn (vzero, CONST0_RTX (<MODE>mode));
2815 emit_insn (gen_sub<mode>3 (operands[0], vzero, operands[1]));
2820 (define_expand "udot_prod<mode>"
2821 [(set (match_operand:V4SI 0 "register_operand" "=v")
2822 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
2823 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
2824 (match_operand:VIshort 2 "register_operand" "v")]
2829 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], operands[2], operands[3]));
2833 (define_expand "sdot_prodv8hi"
2834 [(set (match_operand:V4SI 0 "register_operand" "=v")
2835 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
2836 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
2837 (match_operand:V8HI 2 "register_operand" "v")]
2842 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], operands[2], operands[3]));
2846 (define_expand "widen_usum<mode>3"
2847 [(set (match_operand:V4SI 0 "register_operand" "=v")
2848 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
2849 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")]
2854 rtx vones = gen_reg_rtx (GET_MODE (operands[1]));
2856 emit_insn (gen_altivec_vspltis<VI_char> (vones, const1_rtx));
2857 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], vones, operands[2]));
2861 (define_expand "widen_ssumv16qi3"
2862 [(set (match_operand:V4SI 0 "register_operand" "=v")
2863 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
2864 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")]
2869 rtx vones = gen_reg_rtx (V16QImode);
2871 emit_insn (gen_altivec_vspltisb (vones, const1_rtx));
2872 emit_insn (gen_altivec_vmsummbm (operands[0], operands[1], vones, operands[2]));
2876 (define_expand "widen_ssumv8hi3"
2877 [(set (match_operand:V4SI 0 "register_operand" "=v")
2878 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
2879 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2884 rtx vones = gen_reg_rtx (V8HImode);
2886 emit_insn (gen_altivec_vspltish (vones, const1_rtx));
2887 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], vones, operands[2]));
2891 (define_expand "vec_unpacks_hi_<VP_small_lc>"
2892 [(set (match_operand:VP 0 "register_operand" "=v")
2893 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2894 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2898 (define_expand "vec_unpacks_lo_<VP_small_lc>"
2899 [(set (match_operand:VP 0 "register_operand" "=v")
2900 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2901 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
2905 (define_insn "vperm_v8hiv4si"
2906 [(set (match_operand:V4SI 0 "register_operand" "=v,?wo")
2907 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v,wo")
2908 (match_operand:V4SI 2 "register_operand" "v,0")
2909 (match_operand:V16QI 3 "register_operand" "v,wo")]
2915 [(set_attr "type" "vecperm")
2916 (set_attr "length" "4")])
2918 (define_insn "vperm_v16qiv8hi"
2919 [(set (match_operand:V8HI 0 "register_operand" "=v,?wo")
2920 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v,wo")
2921 (match_operand:V8HI 2 "register_operand" "v,0")
2922 (match_operand:V16QI 3 "register_operand" "v,wo")]
2928 [(set_attr "type" "vecperm")
2929 (set_attr "length" "4")])
2932 (define_expand "vec_unpacku_hi_v16qi"
2933 [(set (match_operand:V8HI 0 "register_operand" "=v")
2934 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
2939 rtx vzero = gen_reg_rtx (V8HImode);
2940 rtx mask = gen_reg_rtx (V16QImode);
2941 rtvec v = rtvec_alloc (16);
2942 bool be = BYTES_BIG_ENDIAN;
2944 emit_insn (gen_altivec_vspltish (vzero, const0_rtx));
2946 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 7);
2947 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 0 : 16);
2948 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 16 : 6);
2949 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 1 : 16);
2950 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 5);
2951 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 2 : 16);
2952 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 16 : 4);
2953 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 3 : 16);
2954 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 3);
2955 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 4 : 16);
2956 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 2);
2957 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 5 : 16);
2958 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 1);
2959 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 6 : 16);
2960 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 : 0);
2961 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 7 : 16);
2963 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2964 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
2968 (define_expand "vec_unpacku_hi_v8hi"
2969 [(set (match_operand:V4SI 0 "register_operand" "=v")
2970 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2975 rtx vzero = gen_reg_rtx (V4SImode);
2976 rtx mask = gen_reg_rtx (V16QImode);
2977 rtvec v = rtvec_alloc (16);
2978 bool be = BYTES_BIG_ENDIAN;
2980 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2982 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 7);
2983 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 17 : 6);
2984 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 0 : 17);
2985 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 1 : 16);
2986 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 5);
2987 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 17 : 4);
2988 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 2 : 17);
2989 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 3 : 16);
2990 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 3);
2991 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 17 : 2);
2992 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 4 : 17);
2993 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 5 : 16);
2994 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 1);
2995 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 : 0);
2996 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 6 : 17);
2997 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 7 : 16);
2999 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3000 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
3004 (define_expand "vec_unpacku_lo_v16qi"
3005 [(set (match_operand:V8HI 0 "register_operand" "=v")
3006 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
3011 rtx vzero = gen_reg_rtx (V8HImode);
3012 rtx mask = gen_reg_rtx (V16QImode);
3013 rtvec v = rtvec_alloc (16);
3014 bool be = BYTES_BIG_ENDIAN;
3016 emit_insn (gen_altivec_vspltish (vzero, const0_rtx));
3018 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
3019 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 8 : 16);
3020 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 16 : 14);
3021 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 9 : 16);
3022 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
3023 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 10 : 16);
3024 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 16 : 12);
3025 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
3026 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
3027 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 12 : 16);
3028 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 10);
3029 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
3030 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 9);
3031 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 14 : 16);
3032 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 : 8);
3033 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);
3035 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3036 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
3040 (define_expand "vec_unpacku_lo_v8hi"
3041 [(set (match_operand:V4SI 0 "register_operand" "=v")
3042 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
3047 rtx vzero = gen_reg_rtx (V4SImode);
3048 rtx mask = gen_reg_rtx (V16QImode);
3049 rtvec v = rtvec_alloc (16);
3050 bool be = BYTES_BIG_ENDIAN;
3052 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
3054 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
3055 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 17 : 14);
3056 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 8 : 17);
3057 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 9 : 16);
3058 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
3059 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 17 : 12);
3060 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 10 : 17);
3061 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
3062 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
3063 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 17 : 10);
3064 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 12 : 17);
3065 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
3066 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 9);
3067 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 : 8);
3068 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 14 : 17);
3069 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);
3071 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3072 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
3076 (define_expand "vec_widen_umult_hi_v16qi"
3077 [(set (match_operand:V8HI 0 "register_operand" "=v")
3078 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3079 (match_operand:V16QI 2 "register_operand" "v")]
3084 rtx ve = gen_reg_rtx (V8HImode);
3085 rtx vo = gen_reg_rtx (V8HImode);
3087 if (BYTES_BIG_ENDIAN)
3089 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3090 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3091 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
3095 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3096 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3097 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
3102 (define_expand "vec_widen_umult_lo_v16qi"
3103 [(set (match_operand:V8HI 0 "register_operand" "=v")
3104 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3105 (match_operand:V16QI 2 "register_operand" "v")]
3110 rtx ve = gen_reg_rtx (V8HImode);
3111 rtx vo = gen_reg_rtx (V8HImode);
3113 if (BYTES_BIG_ENDIAN)
3115 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3116 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3117 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
3121 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3122 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3123 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
3128 (define_expand "vec_widen_smult_hi_v16qi"
3129 [(set (match_operand:V8HI 0 "register_operand" "=v")
3130 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3131 (match_operand:V16QI 2 "register_operand" "v")]
3136 rtx ve = gen_reg_rtx (V8HImode);
3137 rtx vo = gen_reg_rtx (V8HImode);
3139 if (BYTES_BIG_ENDIAN)
3141 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3142 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3143 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
3147 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3148 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3149 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
3154 (define_expand "vec_widen_smult_lo_v16qi"
3155 [(set (match_operand:V8HI 0 "register_operand" "=v")
3156 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3157 (match_operand:V16QI 2 "register_operand" "v")]
3162 rtx ve = gen_reg_rtx (V8HImode);
3163 rtx vo = gen_reg_rtx (V8HImode);
3165 if (BYTES_BIG_ENDIAN)
3167 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3168 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3169 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
3173 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3174 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3175 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
3180 (define_expand "vec_widen_umult_hi_v8hi"
3181 [(set (match_operand:V4SI 0 "register_operand" "=v")
3182 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3183 (match_operand:V8HI 2 "register_operand" "v")]
3188 rtx ve = gen_reg_rtx (V4SImode);
3189 rtx vo = gen_reg_rtx (V4SImode);
3191 if (BYTES_BIG_ENDIAN)
3193 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3194 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3195 emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
3199 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3200 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3201 emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
3206 (define_expand "vec_widen_umult_lo_v8hi"
3207 [(set (match_operand:V4SI 0 "register_operand" "=v")
3208 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3209 (match_operand:V8HI 2 "register_operand" "v")]
3214 rtx ve = gen_reg_rtx (V4SImode);
3215 rtx vo = gen_reg_rtx (V4SImode);
3217 if (BYTES_BIG_ENDIAN)
3219 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3220 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3221 emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
3225 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3226 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3227 emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
3232 (define_expand "vec_widen_smult_hi_v8hi"
3233 [(set (match_operand:V4SI 0 "register_operand" "=v")
3234 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3235 (match_operand:V8HI 2 "register_operand" "v")]
3240 rtx ve = gen_reg_rtx (V4SImode);
3241 rtx vo = gen_reg_rtx (V4SImode);
3243 if (BYTES_BIG_ENDIAN)
3245 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3246 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3247 emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
3251 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3252 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3253 emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
3258 (define_expand "vec_widen_smult_lo_v8hi"
3259 [(set (match_operand:V4SI 0 "register_operand" "=v")
3260 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3261 (match_operand:V8HI 2 "register_operand" "v")]
3266 rtx ve = gen_reg_rtx (V4SImode);
3267 rtx vo = gen_reg_rtx (V4SImode);
3269 if (BYTES_BIG_ENDIAN)
3271 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3272 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3273 emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
3277 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3278 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3279 emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
;; Pack two VP-mode vectors into one vector of the next-narrower element
;; size (<VP_small>).  UNSPEC_VPACK_UNS_UNS_MOD indicates a modulo
;; (truncating) pack with no saturation.
3284 (define_expand "vec_pack_trunc_<mode>"
3285 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
3286 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
3287 (match_operand:VP 2 "register_operand" "v")]
3288 UNSPEC_VPACK_UNS_UNS_MOD))]
;; Element-wise V16QI multiply.  There is no byte-multiply instruction,
;; so compute the even and odd signed byte products (vmulesb/vmulosb),
;; each yielding a V8HI of 16-bit products, then use vperm with a
;; constructed byte-selection mask to pick out the low byte of each
;; 16-bit product.  The mask indices depend on endianness; the values
;; 2*i+1 / 2*i+17 (BE) versus 31-2*i / 15-2*i (LE) select the
;; low-order byte of each halfword from the even/odd result pair.
3292 (define_expand "mulv16qi3"
3293 [(set (match_operand:V16QI 0 "register_operand" "=v")
3294 (mult:V16QI (match_operand:V16QI 1 "register_operand" "v")
3295 (match_operand:V16QI 2 "register_operand" "v")))]
3299 rtx even = gen_reg_rtx (V8HImode);
3300 rtx odd = gen_reg_rtx (V8HImode);
3301 rtx mask = gen_reg_rtx (V16QImode);
3302 rtvec v = rtvec_alloc (16);
3305 for (i = 0; i < 8; ++i) {
3306 RTVEC_ELT (v, 2 * i)
3307 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 1 : 31 - 2 * i);
3308 RTVEC_ELT (v, 2 * i + 1)
3309 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 17 : 15 - 2 * i);
3312 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3313 emit_insn (gen_altivec_vmulesb (even, operands[1], operands[2]));
3314 emit_insn (gen_altivec_vmulosb (odd, operands[1], operands[2]));
3315 emit_insn (gen_altivec_vperm_v8hiv16qi (operands[0], even, odd, mask));
;; V4SF negation without an FP negate instruction: build a vector of
;; -0.0 (sign bit only) and XOR it with the operand.  vspltisw with -1
;; gives all-ones words; shifting each word left by itself (the shift
;; amount is taken modulo 32, i.e. 31) leaves only the sign bit
;; 0x80000000 in each word.
3319 (define_expand "altivec_negv4sf2"
3320 [(use (match_operand:V4SF 0 "register_operand" ""))
3321 (use (match_operand:V4SF 1 "register_operand" ""))]
3327 /* Generate [-0.0, -0.0, -0.0, -0.0].  */
3328 neg0 = gen_reg_rtx (V4SImode);
3329 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
3330 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
3333 emit_insn (gen_xorv4sf3 (operands[0],
3334 gen_lowpart (V4SFmode, neg0), operands[1]));
3339 ;; Vector SIMD PEM v2.06c defines LVLX, LVLXL, LVRX, LVRXL,
3340 ;; STVLX, STVLXL, STVRX, STVRXL, which are available only on Cell.
;; Cell-only unaligned load-left (lvlx): load the bytes of a quadword
;; that lie to the left of the (possibly unaligned) effective address.
3341 (define_insn "altivec_lvlx"
3342 [(set (match_operand:V16QI 0 "register_operand" "=v")
3343 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3345 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3347 [(set_attr "type" "vecload")])
;; LRU-hinted variant of lvlx (the "l" suffix marks the access as
;; last-use for the cache).
3349 (define_insn "altivec_lvlxl"
3350 [(set (match_operand:V16QI 0 "register_operand" "=v")
3351 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3353 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3355 [(set_attr "type" "vecload")])
;; Cell-only unaligned load-right (lvrx): the complementary right-hand
;; portion of an unaligned quadword load.
3357 (define_insn "altivec_lvrx"
3358 [(set (match_operand:V16QI 0 "register_operand" "=v")
3359 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3361 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3363 [(set_attr "type" "vecload")])
;; LRU-hinted variant of lvrx.
3365 (define_insn "altivec_lvrxl"
3366 [(set (match_operand:V16QI 0 "register_operand" "=v")
3367 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3369 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3371 [(set_attr "type" "vecload")])
;; Cell-only unaligned store-left (stvlx); the parallel's unspec keeps
;; the store from being treated as an ordinary vector move.
3373 (define_insn "altivec_stvlx"
3375 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3376 (match_operand:V16QI 1 "register_operand" "v"))
3377 (unspec [(const_int 0)] UNSPEC_STVLX)])]
3378 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3380 [(set_attr "type" "vecstore")])
;; LRU-hinted variant of stvlx.
3382 (define_insn "altivec_stvlxl"
3384 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3385 (match_operand:V16QI 1 "register_operand" "v"))
3386 (unspec [(const_int 0)] UNSPEC_STVLXL)])]
3387 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3389 [(set_attr "type" "vecstore")])
;; Cell-only unaligned store-right (stvrx).
3391 (define_insn "altivec_stvrx"
3393 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3394 (match_operand:V16QI 1 "register_operand" "v"))
3395 (unspec [(const_int 0)] UNSPEC_STVRX)])]
3396 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3398 [(set_attr "type" "vecstore")])
;; LRU-hinted variant of stvrx.
3400 (define_insn "altivec_stvrxl"
3402 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3403 (match_operand:V16QI 1 "register_operand" "v"))
3404 (unspec [(const_int 0)] UNSPEC_STVRXL)])]
3405 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3407 [(set_attr "type" "vecstore")])
;; Unpack the high V8HI halfwords with sign extension to V4SI, then
;; convert to V4SF with vcfsx (signed int -> float, scale factor 0).
3409 (define_expand "vec_unpacks_float_hi_v8hi"
3410 [(set (match_operand:V4SF 0 "register_operand" "")
3411 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3412 UNSPEC_VUPKHS_V4SF))]
3416 rtx tmp = gen_reg_rtx (V4SImode);
3418 emit_insn (gen_vec_unpacks_hi_v8hi (tmp, operands[1]));
3419 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
;; As above, but unpack the low halfwords.
3423 (define_expand "vec_unpacks_float_lo_v8hi"
3424 [(set (match_operand:V4SF 0 "register_operand" "")
3425 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3426 UNSPEC_VUPKLS_V4SF))]
3430 rtx tmp = gen_reg_rtx (V4SImode);
3432 emit_insn (gen_vec_unpacks_lo_v8hi (tmp, operands[1]));
3433 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
;; Unsigned variant: zero-extend the high halfwords and convert with
;; vcfux (unsigned int -> float).
3437 (define_expand "vec_unpacku_float_hi_v8hi"
3438 [(set (match_operand:V4SF 0 "register_operand" "")
3439 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3440 UNSPEC_VUPKHU_V4SF))]
3444 rtx tmp = gen_reg_rtx (V4SImode);
3446 emit_insn (gen_vec_unpacku_hi_v8hi (tmp, operands[1]));
3447 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
;; Unsigned variant, low halfwords.
3451 (define_expand "vec_unpacku_float_lo_v8hi"
3452 [(set (match_operand:V4SF 0 "register_operand" "")
3453 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3454 UNSPEC_VUPKLU_V4SF))]
3458 rtx tmp = gen_reg_rtx (V4SImode);
3460 emit_insn (gen_vec_unpacku_lo_v8hi (tmp, operands[1]));
3461 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
3466 ;; Power8/power9 vector instructions encoded as Altivec instructions
3468 ;; Vector count leading zeros
;; ISA 2.07 (power8) per-element count-leading-zeros for all VI2 integer
;; vector modes, expressed with the generic RTL clz code.
3469 (define_insn "*p8v_clz<mode>2"
3470 [(set (match_operand:VI2 0 "register_operand" "=v")
3471 (clz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
3474 [(set_attr "length" "4")
3475 (set_attr "type" "vecsimple")])
3477 ;; Vector absolute difference unsigned
;; Expander for the unsigned absolute-difference unspec; matched by the
;; *p9_vadu insn below.
3478 (define_expand "vadu<mode>3"
3479 [(set (match_operand:VI 0 "register_operand")
3480 (unspec:VI [(match_operand:VI 1 "register_operand")
3481 (match_operand:VI 2 "register_operand")]
3485 ;; Vector absolute difference unsigned
;; ISA 3.0 (power9) vabsdu[bhw]: per-element |op1 - op2|, unsigned.
3486 (define_insn "*p9_vadu<mode>3"
3487 [(set (match_operand:VI 0 "register_operand" "=v")
3488 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
3489 (match_operand:VI 2 "register_operand" "v")]
3492 "vabsdu<wd> %0,%1,%2"
3493 [(set_attr "type" "vecsimple")])
3495 ;; Vector count trailing zeros
;; ISA 3.0 (power9) per-element count-trailing-zeros, generic RTL ctz.
3496 (define_insn "*p9v_ctz<mode>2"
3497 [(set (match_operand:VI2 0 "register_operand" "=v")
3498 (ctz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
3501 [(set_attr "length" "4")
3502 (set_attr "type" "vecsimple")])
3504 ;; Vector population count
;; ISA 2.07 (power8) per-element population count, generic RTL popcount.
3505 (define_insn "*p8v_popcount<mode>2"
3506 [(set (match_operand:VI2 0 "register_operand" "=v")
3507 (popcount:VI2 (match_operand:VI2 1 "register_operand" "v")))]
3510 [(set_attr "length" "4")
3511 (set_attr "type" "vecsimple")])
;; ISA 3.0 (power9) per-element parity; VParity excludes the byte and
;; halfword modes (see the iterator comment near the top of the file).
3514 (define_insn "*p9v_parity<mode>2"
3515 [(set (match_operand:VParity 0 "register_operand" "=v")
3516 (parity:VParity (match_operand:VParity 1 "register_operand" "v")))]
3519 [(set_attr "length" "4")
3520 (set_attr "type" "vecsimple")])
3522 ;; Vector Gather Bits by Bytes by Doubleword
;; vgbbd: Vector Gather Bits by Bytes by Doubleword (bit-matrix
;; transpose within each doubleword); kept as an unspec since there is
;; no generic RTL equivalent.
3523 (define_insn "p8v_vgbbd"
3524 [(set (match_operand:V16QI 0 "register_operand" "=v")
3525 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
3529 [(set_attr "length" "4")
3530 (set_attr "type" "vecsimple")])
3533 ;; 128-bit binary integer arithmetic
3534 ;; We have a special container type (V1TImode) to allow operations using the
3535 ;; ISA 2.07 128-bit binary support to target the VMX/altivec registers without
3536 ;; having to worry about the register allocator deciding GPRs are better.
;; vadduqm: 128-bit add modulo, expressed as a genuine plus:V1TI so the
;; optimizers can reason about it.
3538 (define_insn "altivec_vadduqm"
3539 [(set (match_operand:V1TI 0 "register_operand" "=v")
3540 (plus:V1TI (match_operand:V1TI 1 "register_operand" "v")
3541 (match_operand:V1TI 2 "register_operand" "v")))]
3544 [(set_attr "length" "4")
3545 (set_attr "type" "vecsimple")])
;; vaddcuq: carry-out of a 128-bit add (result is 0 or 1); an unspec
;; since RTL has no carry-extract code.
3547 (define_insn "altivec_vaddcuq"
3548 [(set (match_operand:V1TI 0 "register_operand" "=v")
3549 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3550 (match_operand:V1TI 2 "register_operand" "v")]
3554 [(set_attr "length" "4")
3555 (set_attr "type" "vecsimple")])
;; vsubuqm: 128-bit subtract modulo, genuine minus:V1TI.
3557 (define_insn "altivec_vsubuqm"
3558 [(set (match_operand:V1TI 0 "register_operand" "=v")
3559 (minus:V1TI (match_operand:V1TI 1 "register_operand" "v")
3560 (match_operand:V1TI 2 "register_operand" "v")))]
3563 [(set_attr "length" "4")
3564 (set_attr "type" "vecsimple")])
;; vsubcuq: carry-out (borrow indicator) of a 128-bit subtract.
3566 (define_insn "altivec_vsubcuq"
3567 [(set (match_operand:V1TI 0 "register_operand" "=v")
3568 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3569 (match_operand:V1TI 2 "register_operand" "v")]
3573 [(set_attr "length" "4")
3574 (set_attr "type" "vecsimple")])
;; Extended (carry-in) forms of the 128-bit add/subtract: operand 3
;; supplies the carry from a previous vaddcuq/vsubcuq, enabling
;; multi-precision arithmetic chains.
;; vaddeuqm: 128-bit add with carry-in, modulo result.
3576 (define_insn "altivec_vaddeuqm"
3577 [(set (match_operand:V1TI 0 "register_operand" "=v")
3578 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3579 (match_operand:V1TI 2 "register_operand" "v")
3580 (match_operand:V1TI 3 "register_operand" "v")]
3583 "vaddeuqm %0,%1,%2,%3"
3584 [(set_attr "length" "4")
3585 (set_attr "type" "vecsimple")])
;; vaddecuq: carry-out of the 128-bit add-with-carry-in.
3587 (define_insn "altivec_vaddecuq"
3588 [(set (match_operand:V1TI 0 "register_operand" "=v")
3589 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3590 (match_operand:V1TI 2 "register_operand" "v")
3591 (match_operand:V1TI 3 "register_operand" "v")]
3594 "vaddecuq %0,%1,%2,%3"
3595 [(set_attr "length" "4")
3596 (set_attr "type" "vecsimple")])
;; vsubeuqm: 128-bit subtract with carry-in, modulo result.
3598 (define_insn "altivec_vsubeuqm"
3599 [(set (match_operand:V1TI 0 "register_operand" "=v")
3600 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3601 (match_operand:V1TI 2 "register_operand" "v")
3602 (match_operand:V1TI 3 "register_operand" "v")]
3605 "vsubeuqm %0,%1,%2,%3"
3606 [(set_attr "length" "4")
3607 (set_attr "type" "vecsimple")])
;; vsubecuq: carry-out of the 128-bit subtract-with-carry-in.
3609 (define_insn "altivec_vsubecuq"
3610 [(set (match_operand:V1TI 0 "register_operand" "=v")
3611 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3612 (match_operand:V1TI 2 "register_operand" "v")
3613 (match_operand:V1TI 3 "register_operand" "v")]
3616 "vsubecuq %0,%1,%2,%3"
3617 [(set_attr "length" "4")
3618 (set_attr "type" "vecsimple")])
3620 ;; We use V2DI as the output type to simplify converting the permute
3621 ;; bits into an integer
;; vbpermq: Vector Bit Permute Quadword.  The output mode is V2DI (not
;; V16QI) purely to make extracting the permuted bit mask as an integer
;; easier, per the comment preceding this pattern.
3622 (define_insn "altivec_vbpermq"
3623 [(set (match_operand:V2DI 0 "register_operand" "=v")
3624 (unspec:V2DI [(match_operand:V16QI 1 "register_operand" "v")
3625 (match_operand:V16QI 2 "register_operand" "v")]
3629 [(set_attr "length" "4")
3630 (set_attr "type" "vecsimple")])
3632 ;; Decimal Integer operations
;; Iterator over the two BCD operations so one template serves both
;; bcdadd and bcdsub.
3633 (define_int_iterator UNSPEC_BCD_ADD_SUB [UNSPEC_BCDADD UNSPEC_BCDSUB])
;; Maps each BCD unspec to the "add"/"sub" substring used in pattern
;; names and mnemonics.
3635 (define_int_attr bcd_add_sub [(UNSPEC_BCDADD "add")
3636 (UNSPEC_BCDSUB "sub")])
;; Comparison codes for which bcd<op>_<code> expanders are generated;
;; unordered flags invalid BCD input / overflow (see comment below).
3638 (define_code_iterator BCD_TEST [eq lt gt unordered])
;; bcdadd./bcdsub.: 128-bit BCD add/subtract.  Operand 3 is the
;; positive-sign encoding selector (0 or 1); CR6 is clobbered because
;; the dot-form instruction always sets it.
3640 (define_insn "bcd<bcd_add_sub>"
3641 [(set (match_operand:V1TI 0 "register_operand" "")
3642 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "")
3643 (match_operand:V1TI 2 "register_operand" "")
3644 (match_operand:QI 3 "const_0_to_1_operand" "")]
3645 UNSPEC_BCD_ADD_SUB))
3646 (clobber (reg:CCFP CR6_REGNO))]
3648 "bcd<bcd_add_sub>. %0,%1,%2,%3"
3649 [(set_attr "length" "4")
3650 (set_attr "type" "vecsimple")])
3652 ;; Use a floating point type (V2DFmode) for the compare to set CR6 so that we
3653 ;; can use the unordered test for BCD nans and add/subtracts that overflow. An
3654 ;; UNORDERED test on an integer type (like V1TImode) is not defined. The type
3655 ;; probably should be one that can go in the VMX (Altivec) registers, so we
3656 ;; can't use DDmode or DFmode.
;; Compare-only form: the BCD result itself is discarded (match_scratch
;; operand 0) and only the CR6 setting is used.  V2DF is used for the
;; compare per the block comment above, so the unordered test is legal.
3657 (define_insn "*bcd<bcd_add_sub>_test"
3658 [(set (reg:CCFP CR6_REGNO)
3660 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "v")
3661 (match_operand:V1TI 2 "register_operand" "v")
3662 (match_operand:QI 3 "const_0_to_1_operand" "i")]
3664 (match_operand:V2DF 4 "zero_constant" "j")))
3665 (clobber (match_scratch:V1TI 0 "=v"))]
3667 "bcd<bcd_add_sub>. %0,%1,%2,%3"
3668 [(set_attr "length" "4")
3669 (set_attr "type" "vecsimple")])
;; Combined form produced by the peephole2 below: one bcdadd./bcdsub.
;; delivers both the BCD value (operand 0) and the CR6 comparison
;; result, instead of executing the instruction twice.
3671 (define_insn "*bcd<bcd_add_sub>_test2"
3672 [(set (match_operand:V1TI 0 "register_operand" "=v")
3673 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3674 (match_operand:V1TI 2 "register_operand" "v")
3675 (match_operand:QI 3 "const_0_to_1_operand" "i")]
3676 UNSPEC_BCD_ADD_SUB))
3677 (set (reg:CCFP CR6_REGNO)
3679 (unspec:V2DF [(match_dup 1)
3683 (match_operand:V2DF 4 "zero_constant" "j")))]
3685 "bcd<bcd_add_sub>. %0,%1,%2,%3"
3686 [(set_attr "length" "4")
3687 (set_attr "type" "vecsimple")])
;; ISA 3.0 DARN (Deliver A Random Number), 32-bit conditioned form.
3689 (define_insn "darn_32"
3690 [(set (match_operand:SI 0 "register_operand" "=r")
3691 (unspec:SI [(const_int 0)] UNSPEC_DARN_32))]
3694 [(set_attr "type" "integer")])
;; DARN, 64-bit raw (unconditioned) form; requires a 64-bit target for
;; the DImode result.
3696 (define_insn "darn_raw"
3697 [(set (match_operand:DI 0 "register_operand" "=r")
3698 (unspec:DI [(const_int 0)] UNSPEC_DARN_RAW))]
3699 "TARGET_P9_MISC && TARGET_64BIT"
3701 [(set_attr "type" "integer")])
3704 [(set (match_operand:DI 0 "register_operand" "=r")
3705 (unspec:DI [(const_int 0)] UNSPEC_DARN))]
3706 "TARGET_P9_MISC && TARGET_64BIT"
3708 [(set_attr "type" "integer")])
;; User-visible expander bcd{add,sub}_{eq,lt,gt,unordered}: perform the
;; BCD operation for its CR6 effect and materialize the requested CR6
;; comparison into an SImode result.  Operand 4 (the zero V2DF compared
;; against) is synthesized in the preparation code.
3710 (define_expand "bcd<bcd_add_sub>_<code>"
3711 [(parallel [(set (reg:CCFP CR6_REGNO)
3713 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "")
3714 (match_operand:V1TI 2 "register_operand" "")
3715 (match_operand:QI 3 "const_0_to_1_operand" "")]
3718 (clobber (match_scratch:V1TI 5 ""))])
3719 (set (match_operand:SI 0 "register_operand" "")
3720 (BCD_TEST:SI (reg:CCFP CR6_REGNO)
3724 operands[4] = CONST0_RTX (V2DFmode);
3727 ;; Peephole2 pattern to combine a bcdadd/bcdsub that calculates the value and
3728 ;; the bcdadd/bcdsub that tests the value. The combiner won't work since
3729 ;; CR6 is a hard coded register. Unfortunately, all of the Altivec predicate
3730 ;; support is hard coded to use the fixed register CR6 instead of creating
3731 ;; a register class for CR6.
3734 [(parallel [(set (match_operand:V1TI 0 "register_operand" "")
3735 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "")
3736 (match_operand:V1TI 2 "register_operand" "")
3737 (match_operand:QI 3 "const_0_to_1_operand" "")]
3738 UNSPEC_BCD_ADD_SUB))
3739 (clobber (reg:CCFP CR6_REGNO))])
3740 (parallel [(set (reg:CCFP CR6_REGNO)
3742 (unspec:V2DF [(match_dup 1)
3746 (match_operand:V2DF 4 "zero_constant" "")))
3747 (clobber (match_operand:V1TI 5 "register_operand" ""))])]
3749 [(parallel [(set (match_dup 0)
3750 (unspec:V1TI [(match_dup 1)
3753 UNSPEC_BCD_ADD_SUB))
3754 (set (reg:CCFP CR6_REGNO)
3756 (unspec:V2DF [(match_dup 1)