2 ;; Copyright (C) 2002-2017 Free Software Foundation, Inc.
3 ;; Contributed by Aldy Hernandez (aldy@quesejoda.com)
5 ;; This file is part of GCC.
7 ;; GCC is free software; you can redistribute it and/or modify it
8 ;; under the terms of the GNU General Public License as published
9 ;; by the Free Software Foundation; either version 3, or (at your
10 ;; option) any later version.
12 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
13 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 ;; License for more details.
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
21 (define_c_enum "unspec"
48 UNSPEC_VPACK_SIGN_SIGN_SAT
49 UNSPEC_VPACK_SIGN_UNS_SAT
50 UNSPEC_VPACK_UNS_UNS_SAT
51 UNSPEC_VPACK_UNS_UNS_MOD
52 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT
76 UNSPEC_VUNPACK_HI_SIGN
77 UNSPEC_VUNPACK_LO_SIGN
78 UNSPEC_VUNPACK_HI_SIGN_DIRECT
79 UNSPEC_VUNPACK_LO_SIGN_DIRECT
150 UNSPEC_VSUMSWS_DIRECT
169 (define_c_enum "unspecv"
177 ;; Like VI, defined in vector.md, but add ISA 2.07 integer vector ops
;; (V2DI is the ISA 2.07 / power8 addition to the base AltiVec set).
178 (define_mode_iterator VI2 [V4SI V8HI V16QI V2DI])
179 ;; Short vec int modes
180 (define_mode_iterator VIshort [V8HI V16QI])
181 ;; Longer vec int modes for rotate/mask ops
182 (define_mode_iterator VIlong [V2DI V4SI])
;; Vector float modes (single precision only here).
184 (define_mode_iterator VF [V4SF])
185 ;; Vec modes, pity mode iterators are not composable
186 (define_mode_iterator V [V4SI V8HI V16QI V4SF])
187 ;; Vec modes for move/logical/permute ops, include vector types for move not
188 ;; otherwise handled by altivec (v2df, v2di, ti)
189 (define_mode_iterator VM [V4SI
197 (KF "FLOAT128_VECTOR_P (KFmode)")
198 (TF "FLOAT128_VECTOR_P (TFmode)")])
200 ;; Like VM, except don't do TImode
201 (define_mode_iterator VM2 [V4SI
208 (KF "FLOAT128_VECTOR_P (KFmode)")
209 (TF "FLOAT128_VECTOR_P (TFmode)")])
211 ;; Map the Vector convert single precision to double precision for integer
212 ;; versus floating point
;; Mode attribute giving the mnemonic suffix: "sxw" for the integer form
;; (V4SI) and "sp" for the floating-point form (V4SF).
213 (define_mode_attr VS_sxwsp [(V4SI "sxw") (V4SF "sp")])
215 ;; Specific iterator for parity which does not have a byte/half-word form, but
216 ;; does have a quad word form
217 (define_mode_iterator VParity [V4SI
220 (TI "TARGET_VSX_TIMODE")])
;; Element-size letter used in AltiVec mnemonics: d/w/h/b for
;; doubleword/word/halfword/byte element vectors.
222 (define_mode_attr VI_char [(V2DI "d") (V4SI "w") (V8HI "h") (V16QI "b")])
;; Scalar mode of one element of each integer vector mode.
223 (define_mode_attr VI_scalar [(V2DI "DI") (V4SI "SI") (V8HI "HI") (V16QI "QI")])
;; Per-mode enable condition: which vector unit must be available for the
;; mode (plain AltiVec for byte/half/word, the power8 vector unit for V2DI).
224 (define_mode_attr VI_unit [(V16QI "VECTOR_UNIT_ALTIVEC_P (V16QImode)")
225 (V8HI "VECTOR_UNIT_ALTIVEC_P (V8HImode)")
226 (V4SI "VECTOR_UNIT_ALTIVEC_P (V4SImode)")
227 (V2DI "VECTOR_UNIT_P8_VECTOR_P (V2DImode)")
228 (V1TI "VECTOR_UNIT_ALTIVEC_P (V1TImode)")])
230 ;; Vector pack/unpack
231 (define_mode_iterator VP [V2DI V4SI V8HI])
;; Mode with elements of half the width (pack destination / unpack source).
232 (define_mode_attr VP_small [(V2DI "V4SI") (V4SI "V8HI") (V8HI "V16QI")])
;; Lower-case spelling of VP_small, for use inside pattern names.
233 (define_mode_attr VP_small_lc [(V2DI "v4si") (V4SI "v8hi") (V8HI "v16qi")])
;; Mnemonic letter for the half-width element size (w/h/b).
234 (define_mode_attr VU_char [(V2DI "w") (V4SI "h") (V8HI "b")])
;; Integer vector modes used by the negate patterns (per the iterator name).
237 (define_mode_iterator VNEG [V4SI V2DI])
239 ;; Vector move instructions.
240 (define_insn "*altivec_mov<mode>"
241 [(set (match_operand:VM2 0 "nonimmediate_operand" "=Z,v,v,?Y,?*r,?*r,v,v,?*r")
242 (match_operand:VM2 1 "input_operand" "v,Z,v,*r,Y,*r,j,W,W"))]
243 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)
244 && (register_operand (operands[0], <MODE>mode)
245 || register_operand (operands[1], <MODE>mode))"
247 switch (which_alternative)
249 case 0: return "stvx %1,%y0";
250 case 1: return "lvx %0,%y1";
251 case 2: return "vor %0,%1,%1";
255 case 6: return "vxor %0,%0,%0";
256 case 7: return output_vec_const_move (operands);
258 default: gcc_unreachable ();
261 [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*,*")
262 (set_attr "length" "4,4,4,20,20,20,4,8,32")])
264 ;; Unlike other altivec moves, allow the GPRs, since a normal use of TImode
265 ;; is for unions. However for plain data movement, slightly favor the vector
267 (define_insn "*altivec_movti"
268 [(set (match_operand:TI 0 "nonimmediate_operand" "=Z,v,v,?Y,?r,?r,v,v")
269 (match_operand:TI 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
270 "VECTOR_MEM_ALTIVEC_P (TImode)
271 && (register_operand (operands[0], TImode)
272 || register_operand (operands[1], TImode))"
274 switch (which_alternative)
276 case 0: return "stvx %1,%y0";
277 case 1: return "lvx %0,%y1";
278 case 2: return "vor %0,%1,%1";
282 case 6: return "vxor %0,%0,%0";
283 case 7: return output_vec_const_move (operands);
284 default: gcc_unreachable ();
287 [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*")])
289 ;; Load up a vector with the most significant bit set by loading up -1 and
290 ;; doing a shift left
292 [(set (match_operand:VM 0 "altivec_register_operand" "")
293 (match_operand:VM 1 "easy_vector_constant_msb" ""))]
294 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
297 rtx dest = operands[0];
298 machine_mode mode = GET_MODE (operands[0]);
302 if (mode == V4SFmode)
305 dest = gen_lowpart (V4SImode, dest);
308 num_elements = GET_MODE_NUNITS (mode);
309 v = rtvec_alloc (num_elements);
310 for (i = 0; i < num_elements; i++)
311 RTVEC_ELT (v, i) = constm1_rtx;
313 emit_insn (gen_vec_initv4si (dest, gen_rtx_PARALLEL (mode, v)));
314 emit_insn (gen_rtx_SET (dest, gen_rtx_ASHIFT (mode, dest, dest)));
319 [(set (match_operand:VM 0 "altivec_register_operand" "")
320 (match_operand:VM 1 "easy_vector_constant_add_self" ""))]
321 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
322 [(set (match_dup 0) (match_dup 3))
323 (set (match_dup 0) (match_dup 4))]
325 rtx dup = gen_easy_altivec_constant (operands[1]);
327 machine_mode op_mode = <MODE>mode;
329 /* Divide the operand of the resulting VEC_DUPLICATE, and use
330 simplify_rtx to make a CONST_VECTOR. */
331 XEXP (dup, 0) = simplify_const_binary_operation (ASHIFTRT, QImode,
332 XEXP (dup, 0), const1_rtx);
333 const_vec = simplify_rtx (dup);
335 if (op_mode == V4SFmode)
338 operands[0] = gen_lowpart (op_mode, operands[0]);
340 if (GET_MODE (const_vec) == op_mode)
341 operands[3] = const_vec;
343 operands[3] = gen_lowpart (op_mode, const_vec);
344 operands[4] = gen_rtx_PLUS (op_mode, operands[0], operands[0]);
348 [(set (match_operand:VM 0 "altivec_register_operand" "")
349 (match_operand:VM 1 "easy_vector_constant_vsldoi" ""))]
350 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && can_create_pseudo_p ()"
351 [(set (match_dup 2) (match_dup 3))
352 (set (match_dup 4) (match_dup 5))
354 (unspec:VM [(match_dup 2)
359 rtx op1 = operands[1];
360 int elt = (BYTES_BIG_ENDIAN) ? 0 : GET_MODE_NUNITS (<MODE>mode) - 1;
361 HOST_WIDE_INT val = const_vector_elt_as_int (op1, elt);
362 rtx rtx_val = GEN_INT (val);
363 int shift = vspltis_shifted (op1);
364 int nunits = GET_MODE_NUNITS (<MODE>mode);
367 gcc_assert (shift != 0);
368 operands[2] = gen_reg_rtx (<MODE>mode);
369 operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, rtvec_alloc (nunits));
370 operands[4] = gen_reg_rtx (<MODE>mode);
374 operands[5] = CONSTM1_RTX (<MODE>mode);
375 operands[6] = GEN_INT (-shift);
379 operands[5] = CONST0_RTX (<MODE>mode);
380 operands[6] = GEN_INT (shift);
383 /* Populate the constant vectors. */
384 for (i = 0; i < nunits; i++)
385 XVECEXP (operands[3], 0, i) = rtx_val;
388 (define_insn "get_vrsave_internal"
389 [(set (match_operand:SI 0 "register_operand" "=r")
390 (unspec:SI [(reg:SI VRSAVE_REGNO)] UNSPEC_GET_VRSAVE))]
394 return "mfspr %0,256";
396 return "mfvrsave %0";
398 [(set_attr "type" "*")])
400 (define_insn "*set_vrsave_internal"
401 [(match_parallel 0 "vrsave_operation"
402 [(set (reg:SI VRSAVE_REGNO)
403 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r")
404 (reg:SI VRSAVE_REGNO)] UNSPECV_SET_VRSAVE))])]
408 return "mtspr 256,%1";
410 return "mtvrsave %1";
412 [(set_attr "type" "*")])
414 (define_insn "*save_world"
415 [(match_parallel 0 "save_world_operation"
416 [(clobber (reg:SI LR_REGNO))
417 (use (match_operand:SI 1 "call_operand" "s"))])]
418 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
420 [(set_attr "type" "branch")
421 (set_attr "length" "4")])
423 (define_insn "*restore_world"
424 [(match_parallel 0 "restore_world_operation"
426 (use (reg:SI LR_REGNO))
427 (use (match_operand:SI 1 "call_operand" "s"))
428 (clobber (match_operand:SI 2 "gpc_reg_operand" "=r"))])]
429 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
432 ;; The save_vregs and restore_vregs patterns don't use memory_operand
433 ;; because (plus (reg) (const_int)) is not a valid vector address.
434 ;; This way is more compact than describing exactly what happens in
435 ;; the out-of-line functions, ie. loading the constant into r11/r12
436 ;; then using indexed addressing, and requires less editing of rtl
437 ;; to describe the operation to dwarf2out_frame_debug_expr.
438 (define_insn "*save_vregs_<mode>_r11"
439 [(match_parallel 0 "any_parallel_operand"
440 [(clobber (reg:P LR_REGNO))
441 (use (match_operand:P 1 "symbol_ref_operand" "s"))
444 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
445 (match_operand:P 3 "short_cint_operand" "I")))
446 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
449 [(set_attr "type" "branch")
450 (set_attr "length" "4")])
452 (define_insn "*save_vregs_<mode>_r12"
453 [(match_parallel 0 "any_parallel_operand"
454 [(clobber (reg:P LR_REGNO))
455 (use (match_operand:P 1 "symbol_ref_operand" "s"))
458 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
459 (match_operand:P 3 "short_cint_operand" "I")))
460 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
463 [(set_attr "type" "branch")
464 (set_attr "length" "4")])
466 (define_insn "*restore_vregs_<mode>_r11"
467 [(match_parallel 0 "any_parallel_operand"
468 [(clobber (reg:P LR_REGNO))
469 (use (match_operand:P 1 "symbol_ref_operand" "s"))
472 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
473 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
474 (match_operand:P 4 "short_cint_operand" "I"))))])]
477 [(set_attr "type" "branch")
478 (set_attr "length" "4")])
480 (define_insn "*restore_vregs_<mode>_r12"
481 [(match_parallel 0 "any_parallel_operand"
482 [(clobber (reg:P LR_REGNO))
483 (use (match_operand:P 1 "symbol_ref_operand" "s"))
486 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
487 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
488 (match_operand:P 4 "short_cint_operand" "I"))))])]
491 [(set_attr "type" "branch")
492 (set_attr "length" "4")])
494 ;; Simple binary operations.
497 (define_insn "add<mode>3"
498 [(set (match_operand:VI2 0 "register_operand" "=v")
499 (plus:VI2 (match_operand:VI2 1 "register_operand" "v")
500 (match_operand:VI2 2 "register_operand" "v")))]
502 "vaddu<VI_char>m %0,%1,%2"
503 [(set_attr "type" "vecsimple")])
505 (define_insn "*altivec_addv4sf3"
506 [(set (match_operand:V4SF 0 "register_operand" "=v")
507 (plus:V4SF (match_operand:V4SF 1 "register_operand" "v")
508 (match_operand:V4SF 2 "register_operand" "v")))]
509 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
511 [(set_attr "type" "vecfloat")])
513 (define_insn "altivec_vaddcuw"
514 [(set (match_operand:V4SI 0 "register_operand" "=v")
515 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
516 (match_operand:V4SI 2 "register_operand" "v")]
518 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
520 [(set_attr "type" "vecsimple")])
522 (define_insn "altivec_vaddu<VI_char>s"
523 [(set (match_operand:VI 0 "register_operand" "=v")
524 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
525 (match_operand:VI 2 "register_operand" "v")]
527 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
529 "vaddu<VI_char>s %0,%1,%2"
530 [(set_attr "type" "vecsimple")])
532 (define_insn "altivec_vadds<VI_char>s"
533 [(set (match_operand:VI 0 "register_operand" "=v")
534 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
535 (match_operand:VI 2 "register_operand" "v")]
537 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
538 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
539 "vadds<VI_char>s %0,%1,%2"
540 [(set_attr "type" "vecsimple")])
543 (define_insn "sub<mode>3"
544 [(set (match_operand:VI2 0 "register_operand" "=v")
545 (minus:VI2 (match_operand:VI2 1 "register_operand" "v")
546 (match_operand:VI2 2 "register_operand" "v")))]
548 "vsubu<VI_char>m %0,%1,%2"
549 [(set_attr "type" "vecsimple")])
551 (define_insn "*altivec_subv4sf3"
552 [(set (match_operand:V4SF 0 "register_operand" "=v")
553 (minus:V4SF (match_operand:V4SF 1 "register_operand" "v")
554 (match_operand:V4SF 2 "register_operand" "v")))]
555 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
557 [(set_attr "type" "vecfloat")])
559 (define_insn "altivec_vsubcuw"
560 [(set (match_operand:V4SI 0 "register_operand" "=v")
561 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
562 (match_operand:V4SI 2 "register_operand" "v")]
564 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
566 [(set_attr "type" "vecsimple")])
568 (define_insn "altivec_vsubu<VI_char>s"
569 [(set (match_operand:VI 0 "register_operand" "=v")
570 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
571 (match_operand:VI 2 "register_operand" "v")]
573 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
574 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
575 "vsubu<VI_char>s %0,%1,%2"
576 [(set_attr "type" "vecsimple")])
578 (define_insn "altivec_vsubs<VI_char>s"
579 [(set (match_operand:VI 0 "register_operand" "=v")
580 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
581 (match_operand:VI 2 "register_operand" "v")]
583 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
584 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
585 "vsubs<VI_char>s %0,%1,%2"
586 [(set_attr "type" "vecsimple")])
589 (define_insn "altivec_vavgu<VI_char>"
590 [(set (match_operand:VI 0 "register_operand" "=v")
591 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
592 (match_operand:VI 2 "register_operand" "v")]
595 "vavgu<VI_char> %0,%1,%2"
596 [(set_attr "type" "vecsimple")])
598 (define_insn "altivec_vavgs<VI_char>"
599 [(set (match_operand:VI 0 "register_operand" "=v")
600 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
601 (match_operand:VI 2 "register_operand" "v")]
603 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
604 "vavgs<VI_char> %0,%1,%2"
605 [(set_attr "type" "vecsimple")])
607 (define_insn "altivec_vcmpbfp"
608 [(set (match_operand:V4SI 0 "register_operand" "=v")
609 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
610 (match_operand:V4SF 2 "register_operand" "v")]
612 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
614 [(set_attr "type" "veccmp")])
616 (define_insn "*altivec_eq<mode>"
617 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
618 (eq:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
619 (match_operand:VI2 2 "altivec_register_operand" "v")))]
621 "vcmpequ<VI_char> %0,%1,%2"
622 [(set_attr "type" "veccmpfx")])
624 (define_insn "*altivec_gt<mode>"
625 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
626 (gt:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
627 (match_operand:VI2 2 "altivec_register_operand" "v")))]
629 "vcmpgts<VI_char> %0,%1,%2"
630 [(set_attr "type" "veccmpfx")])
632 (define_insn "*altivec_gtu<mode>"
633 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
634 (gtu:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
635 (match_operand:VI2 2 "altivec_register_operand" "v")))]
637 "vcmpgtu<VI_char> %0,%1,%2"
638 [(set_attr "type" "veccmpfx")])
640 (define_insn "*altivec_eqv4sf"
641 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
642 (eq:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
643 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
644 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
646 [(set_attr "type" "veccmp")])
648 (define_insn "*altivec_gtv4sf"
649 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
650 (gt:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
651 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
652 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
654 [(set_attr "type" "veccmp")])
656 (define_insn "*altivec_gev4sf"
657 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
658 (ge:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
659 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
660 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
662 [(set_attr "type" "veccmp")])
664 (define_insn "*altivec_vsel<mode>"
665 [(set (match_operand:VM 0 "altivec_register_operand" "=v")
667 (ne:CC (match_operand:VM 1 "altivec_register_operand" "v")
668 (match_operand:VM 4 "zero_constant" ""))
669 (match_operand:VM 2 "altivec_register_operand" "v")
670 (match_operand:VM 3 "altivec_register_operand" "v")))]
671 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
673 [(set_attr "type" "vecmove")])
675 (define_insn "*altivec_vsel<mode>_uns"
676 [(set (match_operand:VM 0 "altivec_register_operand" "=v")
678 (ne:CCUNS (match_operand:VM 1 "altivec_register_operand" "v")
679 (match_operand:VM 4 "zero_constant" ""))
680 (match_operand:VM 2 "altivec_register_operand" "v")
681 (match_operand:VM 3 "altivec_register_operand" "v")))]
682 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
684 [(set_attr "type" "vecmove")])
686 ;; Fused multiply add.
;; Matches the generic RTL fma so that a fused (op1 * op2) + op3 on V4SF
;; is emitted as a single vmaddfp when the V4SF unit is AltiVec.
688 (define_insn "*altivec_fmav4sf4"
689 [(set (match_operand:V4SF 0 "register_operand" "=v")
690 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
691 (match_operand:V4SF 2 "register_operand" "v")
692 (match_operand:V4SF 3 "register_operand" "v")))]
693 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
694 "vmaddfp %0,%1,%2,%3"
695 [(set_attr "type" "vecfloat")])
697 ;; We do multiply as a fused multiply-add with an add of a -0.0 vector.
699 (define_expand "altivec_mulv4sf3"
700 [(set (match_operand:V4SF 0 "register_operand" "")
701 (fma:V4SF (match_operand:V4SF 1 "register_operand" "")
702 (match_operand:V4SF 2 "register_operand" "")
704 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
708 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
709 neg0 = gen_reg_rtx (V4SImode);
710 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
711 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
713 operands[3] = gen_lowpart (V4SFmode, neg0);
716 ;; 32-bit integer multiplication
717 ;; A_high = Operand_0 & 0xFFFF0000 >> 16
718 ;; A_low = Operand_0 & 0xFFFF
719 ;; B_high = Operand_1 & 0xFFFF0000 >> 16
720 ;; B_low = Operand_1 & 0xFFFF
721 ;; result = A_low * B_low + (A_high * B_low + B_high * A_low) << 16
723 ;; (define_insn "mulv4si3"
724 ;; [(set (match_operand:V4SI 0 "register_operand" "=v")
725 ;; (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
726 ;; (match_operand:V4SI 2 "register_operand" "v")))]
727 (define_insn "mulv4si3_p8"
728 [(set (match_operand:V4SI 0 "register_operand" "=v")
729 (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
730 (match_operand:V4SI 2 "register_operand" "v")))]
733 [(set_attr "type" "veccomplex")])
735 (define_expand "mulv4si3"
736 [(use (match_operand:V4SI 0 "register_operand" ""))
737 (use (match_operand:V4SI 1 "register_operand" ""))
738 (use (match_operand:V4SI 2 "register_operand" ""))]
750 if (TARGET_P8_VECTOR)
752 emit_insn (gen_mulv4si3_p8 (operands[0], operands[1], operands[2]));
756 zero = gen_reg_rtx (V4SImode);
757 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
759 sixteen = gen_reg_rtx (V4SImode);
760 emit_insn (gen_altivec_vspltisw (sixteen, gen_rtx_CONST_INT (V4SImode, -16)));
762 swap = gen_reg_rtx (V4SImode);
763 emit_insn (gen_vrotlv4si3 (swap, operands[2], sixteen));
765 one = gen_reg_rtx (V8HImode);
766 convert_move (one, operands[1], 0);
768 two = gen_reg_rtx (V8HImode);
769 convert_move (two, operands[2], 0);
771 small_swap = gen_reg_rtx (V8HImode);
772 convert_move (small_swap, swap, 0);
774 low_product = gen_reg_rtx (V4SImode);
775 emit_insn (gen_altivec_vmulouh (low_product, one, two));
777 high_product = gen_reg_rtx (V4SImode);
778 emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero));
780 emit_insn (gen_vashlv4si3 (high_product, high_product, sixteen));
782 emit_insn (gen_addv4si3 (operands[0], high_product, low_product));
787 (define_expand "mulv8hi3"
788 [(use (match_operand:V8HI 0 "register_operand" ""))
789 (use (match_operand:V8HI 1 "register_operand" ""))
790 (use (match_operand:V8HI 2 "register_operand" ""))]
793 rtx zero = gen_reg_rtx (V8HImode);
795 emit_insn (gen_altivec_vspltish (zero, const0_rtx));
796 emit_insn (gen_altivec_vmladduhm(operands[0], operands[1], operands[2], zero));
801 ;; Fused multiply subtract
802 (define_insn "*altivec_vnmsubfp"
803 [(set (match_operand:V4SF 0 "register_operand" "=v")
805 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
806 (match_operand:V4SF 2 "register_operand" "v")
808 (match_operand:V4SF 3 "register_operand" "v")))))]
809 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
810 "vnmsubfp %0,%1,%2,%3"
811 [(set_attr "type" "vecfloat")])
813 (define_insn "altivec_vmsumu<VI_char>m"
814 [(set (match_operand:V4SI 0 "register_operand" "=v")
815 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
816 (match_operand:VIshort 2 "register_operand" "v")
817 (match_operand:V4SI 3 "register_operand" "v")]
820 "vmsumu<VI_char>m %0,%1,%2,%3"
821 [(set_attr "type" "veccomplex")])
823 (define_insn "altivec_vmsumm<VI_char>m"
824 [(set (match_operand:V4SI 0 "register_operand" "=v")
825 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
826 (match_operand:VIshort 2 "register_operand" "v")
827 (match_operand:V4SI 3 "register_operand" "v")]
830 "vmsumm<VI_char>m %0,%1,%2,%3"
831 [(set_attr "type" "veccomplex")])
833 (define_insn "altivec_vmsumshm"
834 [(set (match_operand:V4SI 0 "register_operand" "=v")
835 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
836 (match_operand:V8HI 2 "register_operand" "v")
837 (match_operand:V4SI 3 "register_operand" "v")]
840 "vmsumshm %0,%1,%2,%3"
841 [(set_attr "type" "veccomplex")])
843 (define_insn "altivec_vmsumuhs"
844 [(set (match_operand:V4SI 0 "register_operand" "=v")
845 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
846 (match_operand:V8HI 2 "register_operand" "v")
847 (match_operand:V4SI 3 "register_operand" "v")]
849 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
851 "vmsumuhs %0,%1,%2,%3"
852 [(set_attr "type" "veccomplex")])
854 (define_insn "altivec_vmsumshs"
855 [(set (match_operand:V4SI 0 "register_operand" "=v")
856 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
857 (match_operand:V8HI 2 "register_operand" "v")
858 (match_operand:V4SI 3 "register_operand" "v")]
860 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
862 "vmsumshs %0,%1,%2,%3"
863 [(set_attr "type" "veccomplex")])
867 (define_insn "umax<mode>3"
868 [(set (match_operand:VI2 0 "register_operand" "=v")
869 (umax:VI2 (match_operand:VI2 1 "register_operand" "v")
870 (match_operand:VI2 2 "register_operand" "v")))]
872 "vmaxu<VI_char> %0,%1,%2"
873 [(set_attr "type" "vecsimple")])
875 (define_insn "smax<mode>3"
876 [(set (match_operand:VI2 0 "register_operand" "=v")
877 (smax:VI2 (match_operand:VI2 1 "register_operand" "v")
878 (match_operand:VI2 2 "register_operand" "v")))]
880 "vmaxs<VI_char> %0,%1,%2"
881 [(set_attr "type" "vecsimple")])
883 (define_insn "*altivec_smaxv4sf3"
884 [(set (match_operand:V4SF 0 "register_operand" "=v")
885 (smax:V4SF (match_operand:V4SF 1 "register_operand" "v")
886 (match_operand:V4SF 2 "register_operand" "v")))]
887 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
889 [(set_attr "type" "veccmp")])
891 (define_insn "umin<mode>3"
892 [(set (match_operand:VI2 0 "register_operand" "=v")
893 (umin:VI2 (match_operand:VI2 1 "register_operand" "v")
894 (match_operand:VI2 2 "register_operand" "v")))]
896 "vminu<VI_char> %0,%1,%2"
897 [(set_attr "type" "vecsimple")])
899 (define_insn "smin<mode>3"
900 [(set (match_operand:VI2 0 "register_operand" "=v")
901 (smin:VI2 (match_operand:VI2 1 "register_operand" "v")
902 (match_operand:VI2 2 "register_operand" "v")))]
904 "vmins<VI_char> %0,%1,%2"
905 [(set_attr "type" "vecsimple")])
907 (define_insn "*altivec_sminv4sf3"
908 [(set (match_operand:V4SF 0 "register_operand" "=v")
909 (smin:V4SF (match_operand:V4SF 1 "register_operand" "v")
910 (match_operand:V4SF 2 "register_operand" "v")))]
911 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
913 [(set_attr "type" "veccmp")])
915 (define_insn "altivec_vmhaddshs"
916 [(set (match_operand:V8HI 0 "register_operand" "=v")
917 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
918 (match_operand:V8HI 2 "register_operand" "v")
919 (match_operand:V8HI 3 "register_operand" "v")]
921 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
923 "vmhaddshs %0,%1,%2,%3"
924 [(set_attr "type" "veccomplex")])
926 (define_insn "altivec_vmhraddshs"
927 [(set (match_operand:V8HI 0 "register_operand" "=v")
928 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
929 (match_operand:V8HI 2 "register_operand" "v")
930 (match_operand:V8HI 3 "register_operand" "v")]
932 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
934 "vmhraddshs %0,%1,%2,%3"
935 [(set_attr "type" "veccomplex")])
937 (define_insn "altivec_vmladduhm"
938 [(set (match_operand:V8HI 0 "register_operand" "=v")
939 (plus:V8HI (mult:V8HI (match_operand:V8HI 1 "register_operand" "v")
940 (match_operand:V8HI 2 "register_operand" "v"))
941 (match_operand:V8HI 3 "register_operand" "v")))]
943 "vmladduhm %0,%1,%2,%3"
944 [(set_attr "type" "veccomplex")])
946 (define_expand "altivec_vmrghb"
947 [(use (match_operand:V16QI 0 "register_operand" ""))
948 (use (match_operand:V16QI 1 "register_operand" ""))
949 (use (match_operand:V16QI 2 "register_operand" ""))]
955 /* Special handling for LE with -maltivec=be. */
956 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
958 v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
959 GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
960 GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
961 GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
962 x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
966 v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
967 GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
968 GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
969 GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
970 x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
973 x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
974 emit_insn (gen_rtx_SET (operands[0], x));
978 (define_insn "*altivec_vmrghb_internal"
979 [(set (match_operand:V16QI 0 "register_operand" "=v")
982 (match_operand:V16QI 1 "register_operand" "v")
983 (match_operand:V16QI 2 "register_operand" "v"))
984 (parallel [(const_int 0) (const_int 16)
985 (const_int 1) (const_int 17)
986 (const_int 2) (const_int 18)
987 (const_int 3) (const_int 19)
988 (const_int 4) (const_int 20)
989 (const_int 5) (const_int 21)
990 (const_int 6) (const_int 22)
991 (const_int 7) (const_int 23)])))]
994 if (BYTES_BIG_ENDIAN)
995 return "vmrghb %0,%1,%2";
997 return "vmrglb %0,%2,%1";
999 [(set_attr "type" "vecperm")])
1001 (define_insn "altivec_vmrghb_direct"
1002 [(set (match_operand:V16QI 0 "register_operand" "=v")
1003 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1004 (match_operand:V16QI 2 "register_operand" "v")]
1005 UNSPEC_VMRGH_DIRECT))]
1008 [(set_attr "type" "vecperm")])
1010 (define_expand "altivec_vmrghh"
1011 [(use (match_operand:V8HI 0 "register_operand" ""))
1012 (use (match_operand:V8HI 1 "register_operand" ""))
1013 (use (match_operand:V8HI 2 "register_operand" ""))]
1019 /* Special handling for LE with -maltivec=be. */
1020 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1022 v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
1023 GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
1024 x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
1028 v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
1029 GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
1030 x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1033 x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1034 emit_insn (gen_rtx_SET (operands[0], x));
1038 (define_insn "*altivec_vmrghh_internal"
1039 [(set (match_operand:V8HI 0 "register_operand" "=v")
1042 (match_operand:V8HI 1 "register_operand" "v")
1043 (match_operand:V8HI 2 "register_operand" "v"))
1044 (parallel [(const_int 0) (const_int 8)
1045 (const_int 1) (const_int 9)
1046 (const_int 2) (const_int 10)
1047 (const_int 3) (const_int 11)])))]
1050 if (BYTES_BIG_ENDIAN)
1051 return "vmrghh %0,%1,%2";
1053 return "vmrglh %0,%2,%1";
1055 [(set_attr "type" "vecperm")])
1057 (define_insn "altivec_vmrghh_direct"
1058 [(set (match_operand:V8HI 0 "register_operand" "=v")
1059 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1060 (match_operand:V8HI 2 "register_operand" "v")]
1061 UNSPEC_VMRGH_DIRECT))]
1064 [(set_attr "type" "vecperm")])
1066 (define_expand "altivec_vmrghw"
1067 [(use (match_operand:V4SI 0 "register_operand" ""))
1068 (use (match_operand:V4SI 1 "register_operand" ""))
1069 (use (match_operand:V4SI 2 "register_operand" ""))]
1070 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1075 /* Special handling for LE with -maltivec=be. */
1076 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1078 v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1079 x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
1083 v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1084 x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1087 x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1088 emit_insn (gen_rtx_SET (operands[0], x));
1092 (define_insn "*altivec_vmrghw_internal"
1093 [(set (match_operand:V4SI 0 "register_operand" "=v")
1096 (match_operand:V4SI 1 "register_operand" "v")
1097 (match_operand:V4SI 2 "register_operand" "v"))
1098 (parallel [(const_int 0) (const_int 4)
1099 (const_int 1) (const_int 5)])))]
1100 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1102 if (BYTES_BIG_ENDIAN)
1103 return "vmrghw %0,%1,%2";
1105 return "vmrglw %0,%2,%1";
1107 [(set_attr "type" "vecperm")])
1109 (define_insn "altivec_vmrghw_direct"
1110 [(set (match_operand:V4SI 0 "register_operand" "=v")
1111 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1112 (match_operand:V4SI 2 "register_operand" "v")]
1113 UNSPEC_VMRGH_DIRECT))]
1116 [(set_attr "type" "vecperm")])
1118 (define_insn "*altivec_vmrghsf"
1119 [(set (match_operand:V4SF 0 "register_operand" "=v")
1122 (match_operand:V4SF 1 "register_operand" "v")
1123 (match_operand:V4SF 2 "register_operand" "v"))
1124 (parallel [(const_int 0) (const_int 4)
1125 (const_int 1) (const_int 5)])))]
1126 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1128 if (BYTES_BIG_ENDIAN)
1129 return "vmrghw %0,%1,%2";
1131 return "vmrglw %0,%2,%1";
1133 [(set_attr "type" "vecperm")])
;; vmrglb: merge-low of two V16QI vectors.  The expander builds the
;; canonical vec_select (vec_concat ...) RTL.  For LE with -maltivec=be
;; (BE element order requested on an LE target) it instead emits the
;; high-half selector with swapped vec_concat operands, so the RTL
;; describes the BE-semantics result.
1135 (define_expand "altivec_vmrglb"
1136 [(use (match_operand:V16QI 0 "register_operand" ""))
1137 (use (match_operand:V16QI 1 "register_operand" ""))
1138 (use (match_operand:V16QI 2 "register_operand" ""))]
1144 /* Special handling for LE with -maltivec=be. */
1145 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1147 v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
1148 GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
1149 GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
1150 GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
1151 x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
1155 v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
1156 GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
1157 GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
1158 GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
1159 x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
1162 x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1163 emit_insn (gen_rtx_SET (operands[0], x));

;; Matcher for the canonical merge-low-byte RTL; on true LE the same
;; result is produced by vmrghb with swapped operands.
1167 (define_insn "*altivec_vmrglb_internal"
1168 [(set (match_operand:V16QI 0 "register_operand" "=v")
1171 (match_operand:V16QI 1 "register_operand" "v")
1172 (match_operand:V16QI 2 "register_operand" "v"))
1173 (parallel [(const_int 8) (const_int 24)
1174 (const_int 9) (const_int 25)
1175 (const_int 10) (const_int 26)
1176 (const_int 11) (const_int 27)
1177 (const_int 12) (const_int 28)
1178 (const_int 13) (const_int 29)
1179 (const_int 14) (const_int 30)
1180 (const_int 15) (const_int 31)])))]
1183 if (BYTES_BIG_ENDIAN)
1184 return "vmrglb %0,%1,%2";
1186 return "vmrghb %0,%2,%1";
1188 [(set_attr "type" "vecperm")])

;; Direct (unspec) form: no endian adjustment in the RTL.
1190 (define_insn "altivec_vmrglb_direct"
1191 [(set (match_operand:V16QI 0 "register_operand" "=v")
1192 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1193 (match_operand:V16QI 2 "register_operand" "v")]
1194 UNSPEC_VMRGL_DIRECT))]
1197 [(set_attr "type" "vecperm")])
;; vmrglh: merge-low of two V8HI vectors.  Same scheme as vmrglb above:
;; canonical vec_select/vec_concat RTL, with the selector and operand
;; order flipped for LE with -maltivec=be.
1199 (define_expand "altivec_vmrglh"
1200 [(use (match_operand:V8HI 0 "register_operand" ""))
1201 (use (match_operand:V8HI 1 "register_operand" ""))
1202 (use (match_operand:V8HI 2 "register_operand" ""))]
1208 /* Special handling for LE with -maltivec=be. */
1209 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1211 v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
1212 GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
1213 x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
1217 v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
1218 GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
1219 x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1222 x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1223 emit_insn (gen_rtx_SET (operands[0], x));

;; Matcher: vmrglh on BE, vmrghh with swapped operands on true LE.
1227 (define_insn "*altivec_vmrglh_internal"
1228 [(set (match_operand:V8HI 0 "register_operand" "=v")
1231 (match_operand:V8HI 1 "register_operand" "v")
1232 (match_operand:V8HI 2 "register_operand" "v"))
1233 (parallel [(const_int 4) (const_int 12)
1234 (const_int 5) (const_int 13)
1235 (const_int 6) (const_int 14)
1236 (const_int 7) (const_int 15)])))]
1239 if (BYTES_BIG_ENDIAN)
1240 return "vmrglh %0,%1,%2";
1242 return "vmrghh %0,%2,%1";
1244 [(set_attr "type" "vecperm")])

;; Direct (unspec) form: no endian adjustment in the RTL.
1246 (define_insn "altivec_vmrglh_direct"
1247 [(set (match_operand:V8HI 0 "register_operand" "=v")
1248 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1249 (match_operand:V8HI 2 "register_operand" "v")]
1250 UNSPEC_VMRGL_DIRECT))]
1253 [(set_attr "type" "vecperm")])
;; vmrglw: merge-low of two V4SI vectors.  Same scheme as vmrglb/vmrglh
;; above, with word-sized elements.
1255 (define_expand "altivec_vmrglw"
1256 [(use (match_operand:V4SI 0 "register_operand" ""))
1257 (use (match_operand:V4SI 1 "register_operand" ""))
1258 (use (match_operand:V4SI 2 "register_operand" ""))]
1259 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1264 /* Special handling for LE with -maltivec=be. */
1265 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1267 v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1268 x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
1272 v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1273 x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1276 x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1277 emit_insn (gen_rtx_SET (operands[0], x));

;; Matcher: vmrglw on BE, vmrghw with swapped operands on true LE.
1281 (define_insn "*altivec_vmrglw_internal"
1282 [(set (match_operand:V4SI 0 "register_operand" "=v")
1285 (match_operand:V4SI 1 "register_operand" "v")
1286 (match_operand:V4SI 2 "register_operand" "v"))
1287 (parallel [(const_int 2) (const_int 6)
1288 (const_int 3) (const_int 7)])))]
1289 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1291 if (BYTES_BIG_ENDIAN)
1292 return "vmrglw %0,%1,%2";
1294 return "vmrghw %0,%2,%1";
1296 [(set_attr "type" "vecperm")])

;; Direct (unspec) form: no endian adjustment in the RTL.
1298 (define_insn "altivec_vmrglw_direct"
1299 [(set (match_operand:V4SI 0 "register_operand" "=v")
1300 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1301 (match_operand:V4SI 2 "register_operand" "v")]
1302 UNSPEC_VMRGL_DIRECT))]
1305 [(set_attr "type" "vecperm")])

;; Merge-low for V4SF: same word-merge instruction, float mode.
1307 (define_insn "*altivec_vmrglsf"
1308 [(set (match_operand:V4SF 0 "register_operand" "=v")
1311 (match_operand:V4SF 1 "register_operand" "v")
1312 (match_operand:V4SF 2 "register_operand" "v"))
1313 (parallel [(const_int 2) (const_int 6)
1314 (const_int 3) (const_int 7)])))]
1315 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1317 if (BYTES_BIG_ENDIAN)
1318 return "vmrglw %0,%1,%2";
1320 return "vmrghw %0,%2,%1";
1322 [(set_attr "type" "vecperm")])
1324 ;; Power8 vector merge two V4SF/V4SI even words to V4SF
;; On true LE the even/odd word instructions exchange roles, so the
;; opposite mnemonic with swapped operands is emitted.
1325 (define_insn "p8_vmrgew_<mode>"
1326 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1328 (vec_concat:<VS_double>
1329 (match_operand:VSX_W 1 "register_operand" "v")
1330 (match_operand:VSX_W 2 "register_operand" "v"))
1331 (parallel [(const_int 0) (const_int 4)
1332 (const_int 2) (const_int 6)])))]
1335 if (BYTES_BIG_ENDIAN)
1336 return "vmrgew %0,%1,%2";
1338 return "vmrgow %0,%2,%1";
1340 [(set_attr "type" "vecperm")])

;; Power8 merge odd words (elements 1,5,3,7 of the concatenation).
1342 (define_insn "p8_vmrgow"
1343 [(set (match_operand:V4SI 0 "register_operand" "=v")
1346 (match_operand:V4SI 1 "register_operand" "v")
1347 (match_operand:V4SI 2 "register_operand" "v"))
1348 (parallel [(const_int 1) (const_int 5)
1349 (const_int 3) (const_int 7)])))]
1352 if (BYTES_BIG_ENDIAN)
1353 return "vmrgow %0,%1,%2";
1355 return "vmrgew %0,%2,%1";
1357 [(set_attr "type" "vecperm")])

;; Direct (unspec) V4SF merge-even-words: no endian adjustment in RTL.
1359 (define_insn "p8_vmrgew_v4sf_direct"
1360 [(set (match_operand:V4SF 0 "register_operand" "=v")
1361 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")
1362 (match_operand:V4SF 2 "register_operand" "v")]
1363 UNSPEC_VMRGEW_DIRECT))]
1366 [(set_attr "type" "vecperm")])
;; Widening multiply even/odd element expanders.  "Even"/"odd" refer to
;; element numbers in the vector element order currently in effect: when
;; VECTOR_ELT_ORDER_BIG the BE-numbered hardware instruction (vmule*/
;; vmulo*) is used directly; otherwise the hardware's even/odd numbering
;; is reversed relative to the requested order, so the opposite
;; instruction is emitted.
1368 (define_expand "vec_widen_umult_even_v16qi"
1369 [(use (match_operand:V8HI 0 "register_operand" ""))
1370 (use (match_operand:V16QI 1 "register_operand" ""))
1371 (use (match_operand:V16QI 2 "register_operand" ""))]
1374 if (VECTOR_ELT_ORDER_BIG)
1375 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1377 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));

1381 (define_expand "vec_widen_smult_even_v16qi"
1382 [(use (match_operand:V8HI 0 "register_operand" ""))
1383 (use (match_operand:V16QI 1 "register_operand" ""))
1384 (use (match_operand:V16QI 2 "register_operand" ""))]
1387 if (VECTOR_ELT_ORDER_BIG)
1388 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1390 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));

1394 (define_expand "vec_widen_umult_even_v8hi"
1395 [(use (match_operand:V4SI 0 "register_operand" ""))
1396 (use (match_operand:V8HI 1 "register_operand" ""))
1397 (use (match_operand:V8HI 2 "register_operand" ""))]
1400 if (VECTOR_ELT_ORDER_BIG)
1401 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1403 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));

1407 (define_expand "vec_widen_smult_even_v8hi"
1408 [(use (match_operand:V4SI 0 "register_operand" ""))
1409 (use (match_operand:V8HI 1 "register_operand" ""))
1410 (use (match_operand:V8HI 2 "register_operand" ""))]
1413 if (VECTOR_ELT_ORDER_BIG)
1414 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1416 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));

1420 (define_expand "vec_widen_umult_even_v4si"
1421 [(use (match_operand:V2DI 0 "register_operand" ""))
1422 (use (match_operand:V4SI 1 "register_operand" ""))
1423 (use (match_operand:V4SI 2 "register_operand" ""))]
1426 if (VECTOR_ELT_ORDER_BIG)
1427 emit_insn (gen_altivec_vmuleuw (operands[0], operands[1], operands[2]));
1429 emit_insn (gen_altivec_vmulouw (operands[0], operands[1], operands[2]));

1433 (define_expand "vec_widen_smult_even_v4si"
1434 [(use (match_operand:V2DI 0 "register_operand" ""))
1435 (use (match_operand:V4SI 1 "register_operand" ""))
1436 (use (match_operand:V4SI 2 "register_operand" ""))]
1439 if (VECTOR_ELT_ORDER_BIG)
1440 emit_insn (gen_altivec_vmulesw (operands[0], operands[1], operands[2]));
1442 emit_insn (gen_altivec_vmulosw (operands[0], operands[1], operands[2]));

;; Odd-element forms: the instruction choice is the mirror image of the
;; even-element expanders above.
1446 (define_expand "vec_widen_umult_odd_v16qi"
1447 [(use (match_operand:V8HI 0 "register_operand" ""))
1448 (use (match_operand:V16QI 1 "register_operand" ""))
1449 (use (match_operand:V16QI 2 "register_operand" ""))]
1452 if (VECTOR_ELT_ORDER_BIG)
1453 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1455 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));

1459 (define_expand "vec_widen_smult_odd_v16qi"
1460 [(use (match_operand:V8HI 0 "register_operand" ""))
1461 (use (match_operand:V16QI 1 "register_operand" ""))
1462 (use (match_operand:V16QI 2 "register_operand" ""))]
1465 if (VECTOR_ELT_ORDER_BIG)
1466 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1468 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));

1472 (define_expand "vec_widen_umult_odd_v8hi"
1473 [(use (match_operand:V4SI 0 "register_operand" ""))
1474 (use (match_operand:V8HI 1 "register_operand" ""))
1475 (use (match_operand:V8HI 2 "register_operand" ""))]
1478 if (VECTOR_ELT_ORDER_BIG)
1479 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1481 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));

1485 (define_expand "vec_widen_smult_odd_v8hi"
1486 [(use (match_operand:V4SI 0 "register_operand" ""))
1487 (use (match_operand:V8HI 1 "register_operand" ""))
1488 (use (match_operand:V8HI 2 "register_operand" ""))]
1491 if (VECTOR_ELT_ORDER_BIG)
1492 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1494 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));

1498 (define_expand "vec_widen_umult_odd_v4si"
1499 [(use (match_operand:V2DI 0 "register_operand" ""))
1500 (use (match_operand:V4SI 1 "register_operand" ""))
1501 (use (match_operand:V4SI 2 "register_operand" ""))]
1504 if (VECTOR_ELT_ORDER_BIG)
1505 emit_insn (gen_altivec_vmulouw (operands[0], operands[1], operands[2]));
1507 emit_insn (gen_altivec_vmuleuw (operands[0], operands[1], operands[2]));

1511 (define_expand "vec_widen_smult_odd_v4si"
1512 [(use (match_operand:V2DI 0 "register_operand" ""))
1513 (use (match_operand:V4SI 1 "register_operand" ""))
1514 (use (match_operand:V4SI 2 "register_operand" ""))]
1517 if (VECTOR_ELT_ORDER_BIG)
1518 emit_insn (gen_altivec_vmulosw (operands[0], operands[1],
1521 emit_insn (gen_altivec_vmulesw (operands[0], operands[1],
;; The underlying widening-multiply instructions used by the expanders
;; above.  Each takes two narrow-element vectors and produces a vector
;; of double-width products; expressed as unspecs since the hardware's
;; even/odd element selection is BE-numbered.
;; NOTE(review): the unspec names, insn conditions and output templates
;; fall in lines not visible in this chunk — confirm against full file.
1526 (define_insn "altivec_vmuleub"
1527 [(set (match_operand:V8HI 0 "register_operand" "=v")
1528 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1529 (match_operand:V16QI 2 "register_operand" "v")]
1533 [(set_attr "type" "veccomplex")])

1535 (define_insn "altivec_vmuloub"
1536 [(set (match_operand:V8HI 0 "register_operand" "=v")
1537 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1538 (match_operand:V16QI 2 "register_operand" "v")]
1542 [(set_attr "type" "veccomplex")])

1544 (define_insn "altivec_vmulesb"
1545 [(set (match_operand:V8HI 0 "register_operand" "=v")
1546 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1547 (match_operand:V16QI 2 "register_operand" "v")]
1551 [(set_attr "type" "veccomplex")])

1553 (define_insn "altivec_vmulosb"
1554 [(set (match_operand:V8HI 0 "register_operand" "=v")
1555 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1556 (match_operand:V16QI 2 "register_operand" "v")]
1560 [(set_attr "type" "veccomplex")])

1562 (define_insn "altivec_vmuleuh"
1563 [(set (match_operand:V4SI 0 "register_operand" "=v")
1564 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1565 (match_operand:V8HI 2 "register_operand" "v")]
1569 [(set_attr "type" "veccomplex")])

1571 (define_insn "altivec_vmulouh"
1572 [(set (match_operand:V4SI 0 "register_operand" "=v")
1573 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1574 (match_operand:V8HI 2 "register_operand" "v")]
1578 [(set_attr "type" "veccomplex")])

1580 (define_insn "altivec_vmulesh"
1581 [(set (match_operand:V4SI 0 "register_operand" "=v")
1582 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1583 (match_operand:V8HI 2 "register_operand" "v")]
1587 [(set_attr "type" "veccomplex")])

1589 (define_insn "altivec_vmulosh"
1590 [(set (match_operand:V4SI 0 "register_operand" "=v")
1591 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1592 (match_operand:V8HI 2 "register_operand" "v")]
1596 [(set_attr "type" "veccomplex")])

1598 (define_insn "altivec_vmuleuw"
1599 [(set (match_operand:V2DI 0 "register_operand" "=v")
1600 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1601 (match_operand:V4SI 2 "register_operand" "v")]
1605 [(set_attr "type" "veccomplex")])

1607 (define_insn "altivec_vmulouw"
1608 [(set (match_operand:V2DI 0 "register_operand" "=v")
1609 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1610 (match_operand:V4SI 2 "register_operand" "v")]
1614 [(set_attr "type" "veccomplex")])

1616 (define_insn "altivec_vmulesw"
1617 [(set (match_operand:V2DI 0 "register_operand" "=v")
1618 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1619 (match_operand:V4SI 2 "register_operand" "v")]
1623 [(set_attr "type" "veccomplex")])

1625 (define_insn "altivec_vmulosw"
1626 [(set (match_operand:V2DI 0 "register_operand" "=v")
1627 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1628 (match_operand:V4SI 2 "register_operand" "v")]
1632 [(set_attr "type" "veccomplex")])
1634 ;; Vector pack/unpack
;; Pack instructions narrow two wide-element vectors into one vector of
;; half-width elements.  The hardware places operand 1's elements in the
;; high half (BE numbering), so for non-BE element order the operands
;; are swapped in the emitted assembly.
1635 (define_insn "altivec_vpkpx"
1636 [(set (match_operand:V8HI 0 "register_operand" "=v")
1637 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
1638 (match_operand:V4SI 2 "register_operand" "v")]
1643 if (VECTOR_ELT_ORDER_BIG)
1644 return \"vpkpx %0,%1,%2\";
1646 return \"vpkpx %0,%2,%1\";
1648 [(set_attr "type" "vecperm")])

;; Pack signed -> signed with saturation.
1650 (define_insn "altivec_vpks<VI_char>ss"
1651 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1652 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1653 (match_operand:VP 2 "register_operand" "v")]
1654 UNSPEC_VPACK_SIGN_SIGN_SAT))]
1658 if (VECTOR_ELT_ORDER_BIG)
1659 return \"vpks<VI_char>ss %0,%1,%2\";
1661 return \"vpks<VI_char>ss %0,%2,%1\";
1663 [(set_attr "type" "vecperm")])

;; Pack signed -> unsigned with saturation.
1665 (define_insn "altivec_vpks<VI_char>us"
1666 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1667 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1668 (match_operand:VP 2 "register_operand" "v")]
1669 UNSPEC_VPACK_SIGN_UNS_SAT))]
1673 if (VECTOR_ELT_ORDER_BIG)
1674 return \"vpks<VI_char>us %0,%1,%2\";
1676 return \"vpks<VI_char>us %0,%2,%1\";
1678 [(set_attr "type" "vecperm")])

;; Pack unsigned -> unsigned with saturation.
1680 (define_insn "altivec_vpku<VI_char>us"
1681 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1682 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1683 (match_operand:VP 2 "register_operand" "v")]
1684 UNSPEC_VPACK_UNS_UNS_SAT))]
1688 if (VECTOR_ELT_ORDER_BIG)
1689 return \"vpku<VI_char>us %0,%1,%2\";
1691 return \"vpku<VI_char>us %0,%2,%1\";
1693 [(set_attr "type" "vecperm")])

;; Pack unsigned -> unsigned modulo (truncating, no saturation).
1695 (define_insn "altivec_vpku<VI_char>um"
1696 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1697 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1698 (match_operand:VP 2 "register_operand" "v")]
1699 UNSPEC_VPACK_UNS_UNS_MOD))]
1703 if (VECTOR_ELT_ORDER_BIG)
1704 return \"vpku<VI_char>um %0,%1,%2\";
1706 return \"vpku<VI_char>um %0,%2,%1\";
1708 [(set_attr "type" "vecperm")])

;; Direct form: swaps on raw BYTES_BIG_ENDIAN only, ignoring the
;; -maltivec=be element-order override.
1710 (define_insn "altivec_vpku<VI_char>um_direct"
1711 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1712 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1713 (match_operand:VP 2 "register_operand" "v")]
1714 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT))]
1718 if (BYTES_BIG_ENDIAN)
1719 return \"vpku<VI_char>um %0,%1,%2\";
1721 return \"vpku<VI_char>um %0,%2,%1\";
1723 [(set_attr "type" "vecperm")])
;; Per-element rotate left; shift counts come from the corresponding
;; element of operand 2.
1725 (define_insn "*altivec_vrl<VI_char>"
1726 [(set (match_operand:VI2 0 "register_operand" "=v")
1727 (rotate:VI2 (match_operand:VI2 1 "register_operand" "v")
1728 (match_operand:VI2 2 "register_operand" "v")))]
1730 "vrl<VI_char> %0,%1,%2"
1731 [(set_attr "type" "vecsimple")])

;; Rotate-left-then-mask-insert: operand 1 is tied to the output
;; (constraint "0"), i.e. the destination is also a source.
1733 (define_insn "altivec_vrl<VI_char>mi"
1734 [(set (match_operand:VIlong 0 "register_operand" "=v")
1735 (unspec:VIlong [(match_operand:VIlong 1 "register_operand" "0")
1736 (match_operand:VIlong 2 "register_operand" "v")
1737 (match_operand:VIlong 3 "register_operand" "v")]
1740 "vrl<VI_char>mi %0,%2,%3"
1741 [(set_attr "type" "veclogical")])

;; Rotate-left-then-AND-with-mask.
1743 (define_insn "altivec_vrl<VI_char>nm"
1744 [(set (match_operand:VIlong 0 "register_operand" "=v")
1745 (unspec:VIlong [(match_operand:VIlong 1 "register_operand" "v")
1746 (match_operand:VIlong 2 "register_operand" "v")]
1749 "vrl<VI_char>nm %0,%1,%2"
1750 [(set_attr "type" "veclogical")])
;; Whole-register shift instructions, modeled as unspecs on V4SI.
1752 (define_insn "altivec_vsl"
1753 [(set (match_operand:V4SI 0 "register_operand" "=v")
1754 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1755 (match_operand:V4SI 2 "register_operand" "v")]
1759 [(set_attr "type" "vecperm")])

;; Shift left by octets (byte-granular whole-register shift).
1761 (define_insn "altivec_vslo"
1762 [(set (match_operand:V4SI 0 "register_operand" "=v")
1763 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1764 (match_operand:V4SI 2 "register_operand" "v")]
1768 [(set_attr "type" "vecperm")])

;; NOTE(review): the define_insn header lines for the next two V16QI
;; unspec patterns are not visible in this chunk; verify their names
;; and unspecs against the full file before editing.
1771 [(set (match_operand:V16QI 0 "register_operand" "=v")
1772 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1773 (match_operand:V16QI 2 "register_operand" "v")]
1777 [(set_attr "type" "vecsimple")])

1780 [(set (match_operand:V16QI 0 "register_operand" "=v")
1781 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1782 (match_operand:V16QI 2 "register_operand" "v")]
1786 [(set_attr "type" "vecsimple")])
;; Per-element logical shift left.
1788 (define_insn "*altivec_vsl<VI_char>"
1789 [(set (match_operand:VI2 0 "register_operand" "=v")
1790 (ashift:VI2 (match_operand:VI2 1 "register_operand" "v")
1791 (match_operand:VI2 2 "register_operand" "v")))]
1793 "vsl<VI_char> %0,%1,%2"
1794 [(set_attr "type" "vecsimple")])

;; Per-element logical (zero-filling) shift right.
1796 (define_insn "*altivec_vsr<VI_char>"
1797 [(set (match_operand:VI2 0 "register_operand" "=v")
1798 (lshiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1799 (match_operand:VI2 2 "register_operand" "v")))]
1801 "vsr<VI_char> %0,%1,%2"
1802 [(set_attr "type" "vecsimple")])

;; Per-element arithmetic (sign-extending) shift right.
1804 (define_insn "*altivec_vsra<VI_char>"
1805 [(set (match_operand:VI2 0 "register_operand" "=v")
1806 (ashiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1807 (match_operand:VI2 2 "register_operand" "v")))]
1809 "vsra<VI_char> %0,%1,%2"
1810 [(set_attr "type" "vecsimple")])

;; Whole-register shift right (bit granular).
1812 (define_insn "altivec_vsr"
1813 [(set (match_operand:V4SI 0 "register_operand" "=v")
1814 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1815 (match_operand:V4SI 2 "register_operand" "v")]
1819 [(set_attr "type" "vecperm")])

;; Whole-register shift right by octets.
1821 (define_insn "altivec_vsro"
1822 [(set (match_operand:V4SI 0 "register_operand" "=v")
1823 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1824 (match_operand:V4SI 2 "register_operand" "v")]
1828 [(set_attr "type" "vecperm")])
;; Sum-across instructions.  All of them also set VSCR (the saturation
;; status register), modeled by the parallel set of VSCR_REGNO.
1830 (define_insn "altivec_vsum4ubs"
1831 [(set (match_operand:V4SI 0 "register_operand" "=v")
1832 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")
1833 (match_operand:V4SI 2 "register_operand" "v")]
1835 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1838 [(set_attr "type" "veccomplex")])

1840 (define_insn "altivec_vsum4s<VI_char>s"
1841 [(set (match_operand:V4SI 0 "register_operand" "=v")
1842 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1843 (match_operand:V4SI 2 "register_operand" "v")]
1845 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1847 "vsum4s<VI_char>s %0,%1,%2"
1848 [(set_attr "type" "veccomplex")])

1850 ;; FIXME: For the following two patterns, the scratch should only be
1851 ;; allocated for !VECTOR_ELT_ORDER_BIG, and the instructions should
1852 ;; be emitted separately.
;; For LE element order the result/input positions differ from the
;; hardware's, so a 3-instruction vsldoi/vsum2sws/vsldoi sequence is
;; emitted into a scratch register; length is 12 bytes in that case.
1853 (define_insn "altivec_vsum2sws"
1854 [(set (match_operand:V4SI 0 "register_operand" "=v")
1855 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1856 (match_operand:V4SI 2 "register_operand" "v")]
1858 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))
1859 (clobber (match_scratch:V4SI 3 "=v"))]
1862 if (VECTOR_ELT_ORDER_BIG)
1863 return "vsum2sws %0,%1,%2";
1865 return "vsldoi %3,%2,%2,12\n\tvsum2sws %3,%1,%3\n\tvsldoi %0,%3,%3,4";
1867 [(set_attr "type" "veccomplex")
1868 (set (attr "length")
1870 (match_test "VECTOR_ELT_ORDER_BIG")
1872 (const_string "12")))])

;; Sum across the whole vector; same LE multi-instruction strategy
;; (vspltw/vsumsws/vsldoi) as vsum2sws above.
1874 (define_insn "altivec_vsumsws"
1875 [(set (match_operand:V4SI 0 "register_operand" "=v")
1876 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1877 (match_operand:V4SI 2 "register_operand" "v")]
1879 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))
1880 (clobber (match_scratch:V4SI 3 "=v"))]
1883 if (VECTOR_ELT_ORDER_BIG)
1884 return "vsumsws %0,%1,%2";
1886 return "vspltw %3,%2,0\n\tvsumsws %3,%1,%3\n\tvsldoi %0,%3,%3,12";
1888 [(set_attr "type" "veccomplex")
1889 (set (attr "length")
1891 (match_test "(VECTOR_ELT_ORDER_BIG)")
1893 (const_string "12")))])

;; Direct form: single instruction, no endian adjustment or scratch.
1895 (define_insn "altivec_vsumsws_direct"
1896 [(set (match_operand:V4SI 0 "register_operand" "=v")
1897 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1898 (match_operand:V4SI 2 "register_operand" "v")]
1899 UNSPEC_VSUMSWS_DIRECT))
1900 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1903 [(set_attr "type" "veccomplex")])
;; vspltb: splat byte element 2 of operand 1 across the result.  The
;; expander stores the *element-order-adjusted* index in the RTL; the
;; internal insn then re-adjusts for the hardware's BE-biased numbering.
1905 (define_expand "altivec_vspltb"
1906 [(use (match_operand:V16QI 0 "register_operand" ""))
1907 (use (match_operand:V16QI 1 "register_operand" ""))
1908 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1914 /* Special handling for LE with -maltivec=be. We have to reflect
1915 the actual selected index for the splat in the RTL. */
1916 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1917 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
1919 v = gen_rtvec (1, operands[2]);
1920 x = gen_rtx_VEC_SELECT (QImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1921 x = gen_rtx_VEC_DUPLICATE (V16QImode, x);
1922 emit_insn (gen_rtx_SET (operands[0], x));

1926 (define_insn "*altivec_vspltb_internal"
1927 [(set (match_operand:V16QI 0 "register_operand" "=v")
1928 (vec_duplicate:V16QI
1929 (vec_select:QI (match_operand:V16QI 1 "register_operand" "v")
1931 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
1934 /* For true LE, this adjusts the selected index. For LE with
1935 -maltivec=be, this reverses what was done in the define_expand
1936 because the instruction already has big-endian bias. */
1937 if (!BYTES_BIG_ENDIAN)
1938 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
1940 return "vspltb %0,%1,%2";
1942 [(set_attr "type" "vecperm")])

;; Direct form: the immediate is passed to the instruction unchanged.
1944 (define_insn "altivec_vspltb_direct"
1945 [(set (match_operand:V16QI 0 "register_operand" "=v")
1946 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1947 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1948 UNSPEC_VSPLT_DIRECT))]
1951 [(set_attr "type" "vecperm")])
;; vsplth: splat halfword — same expand/internal/direct scheme as
;; vspltb above, with an index range of 0..7.
1953 (define_expand "altivec_vsplth"
1954 [(use (match_operand:V8HI 0 "register_operand" ""))
1955 (use (match_operand:V8HI 1 "register_operand" ""))
1956 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1962 /* Special handling for LE with -maltivec=be. We have to reflect
1963 the actual selected index for the splat in the RTL. */
1964 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1965 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
1967 v = gen_rtvec (1, operands[2]);
1968 x = gen_rtx_VEC_SELECT (HImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1969 x = gen_rtx_VEC_DUPLICATE (V8HImode, x);
1970 emit_insn (gen_rtx_SET (operands[0], x));

1974 (define_insn "*altivec_vsplth_internal"
1975 [(set (match_operand:V8HI 0 "register_operand" "=v")
1977 (vec_select:HI (match_operand:V8HI 1 "register_operand" "v")
1979 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
1982 /* For true LE, this adjusts the selected index. For LE with
1983 -maltivec=be, this reverses what was done in the define_expand
1984 because the instruction already has big-endian bias. */
1985 if (!BYTES_BIG_ENDIAN)
1986 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
1988 return "vsplth %0,%1,%2";
1990 [(set_attr "type" "vecperm")])

;; Direct form: immediate passed through unchanged.
1992 (define_insn "altivec_vsplth_direct"
1993 [(set (match_operand:V8HI 0 "register_operand" "=v")
1994 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1995 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1996 UNSPEC_VSPLT_DIRECT))]
1999 [(set_attr "type" "vecperm")])
;; vspltw: splat word — same expand/internal/direct scheme as vspltb,
;; with an index range of 0..3.
2001 (define_expand "altivec_vspltw"
2002 [(use (match_operand:V4SI 0 "register_operand" ""))
2003 (use (match_operand:V4SI 1 "register_operand" ""))
2004 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
2010 /* Special handling for LE with -maltivec=be. We have to reflect
2011 the actual selected index for the splat in the RTL. */
2012 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2013 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2015 v = gen_rtvec (1, operands[2]);
2016 x = gen_rtx_VEC_SELECT (SImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2017 x = gen_rtx_VEC_DUPLICATE (V4SImode, x);
2018 emit_insn (gen_rtx_SET (operands[0], x));

2022 (define_insn "*altivec_vspltw_internal"
2023 [(set (match_operand:V4SI 0 "register_operand" "=v")
2025 (vec_select:SI (match_operand:V4SI 1 "register_operand" "v")
2027 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
2030 /* For true LE, this adjusts the selected index. For LE with
2031 -maltivec=be, this reverses what was done in the define_expand
2032 because the instruction already has big-endian bias. */
2033 if (!BYTES_BIG_ENDIAN)
2034 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2036 return "vspltw %0,%1,%2";
2038 [(set_attr "type" "vecperm")])

;; Direct form: immediate passed through unchanged.
2040 (define_insn "altivec_vspltw_direct"
2041 [(set (match_operand:V4SI 0 "register_operand" "=v")
2042 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2043 (match_operand:QI 2 "u5bit_cint_operand" "i")]
2044 UNSPEC_VSPLT_DIRECT))]
2047 [(set_attr "type" "vecperm")])
;; Splat a float word: same index scheme as vspltw; the emitted
;; instruction is still the integer vspltw since it is a pure bit move.
2049 (define_expand "altivec_vspltsf"
2050 [(use (match_operand:V4SF 0 "register_operand" ""))
2051 (use (match_operand:V4SF 1 "register_operand" ""))
2052 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
2058 /* Special handling for LE with -maltivec=be. We have to reflect
2059 the actual selected index for the splat in the RTL. */
2060 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2061 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2063 v = gen_rtvec (1, operands[2]);
2064 x = gen_rtx_VEC_SELECT (SFmode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2065 x = gen_rtx_VEC_DUPLICATE (V4SFmode, x);
2066 emit_insn (gen_rtx_SET (operands[0], x));

2070 (define_insn "*altivec_vspltsf_internal"
2071 [(set (match_operand:V4SF 0 "register_operand" "=v")
2073 (vec_select:SF (match_operand:V4SF 1 "register_operand" "v")
2075 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
2076 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2078 /* For true LE, this adjusts the selected index. For LE with
2079 -maltivec=be, this reverses what was done in the define_expand
2080 because the instruction already has big-endian bias. */
2081 if (!BYTES_BIG_ENDIAN)
2082 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2084 return "vspltw %0,%1,%2";
2086 [(set_attr "type" "vecperm")])

;; Splat a 5-bit signed immediate into every element.
2088 (define_insn "altivec_vspltis<VI_char>"
2089 [(set (match_operand:VI 0 "register_operand" "=v")
2091 (match_operand:QI 1 "s5bit_cint_operand" "i")))]
2093 "vspltis<VI_char> %0,%1"
2094 [(set_attr "type" "vecperm")])
;; vrfiz: round each float element to an integral value toward zero
;; (truncate), modeled with the fix RTL code.
2096 (define_insn "*altivec_vrfiz"
2097 [(set (match_operand:V4SF 0 "register_operand" "=v")
2098 (fix:V4SF (match_operand:V4SF 1 "register_operand" "v")))]
2099 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2101 [(set_attr "type" "vecfloat")])
;; vperm: permute bytes of operands 1 and 2 under control of the byte
;; selector in operand 3.  For non-BE element order the expander defers
;; to altivec_expand_vec_perm_le, which rewrites the operands/selector
;; for the hardware's BE-numbered permute.
2103 (define_expand "altivec_vperm_<mode>"
2104 [(set (match_operand:VM 0 "register_operand" "")
2105 (unspec:VM [(match_operand:VM 1 "register_operand" "")
2106 (match_operand:VM 2 "register_operand" "")
2107 (match_operand:V16QI 3 "register_operand" "")]
2111 if (!VECTOR_ELT_ORDER_BIG)
2113 altivec_expand_vec_perm_le (operands);

2118 ;; Slightly prefer vperm, since the target does not overlap the source
2119 (define_insn "*altivec_vperm_<mode>_internal"
2120 [(set (match_operand:VM 0 "register_operand" "=v,?wo")
2121 (unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
2122 (match_operand:VM 2 "register_operand" "v,0")
2123 (match_operand:V16QI 3 "register_operand" "v,wo")]
2129 [(set_attr "type" "vecperm")
2130 (set_attr "length" "4")])

;; Mixed-mode permute: V8HI inputs producing a V16QI result.
2132 (define_insn "altivec_vperm_v8hiv16qi"
2133 [(set (match_operand:V16QI 0 "register_operand" "=v,?wo")
2134 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "v,wo")
2135 (match_operand:V8HI 2 "register_operand" "v,0")
2136 (match_operand:V16QI 3 "register_operand" "v,wo")]
2142 [(set_attr "type" "vecperm")
2143 (set_attr "length" "4")])

;; Unsigned variant of the vperm expander; same LE handling.
2145 (define_expand "altivec_vperm_<mode>_uns"
2146 [(set (match_operand:VM 0 "register_operand" "")
2147 (unspec:VM [(match_operand:VM 1 "register_operand" "")
2148 (match_operand:VM 2 "register_operand" "")
2149 (match_operand:V16QI 3 "register_operand" "")]
2153 if (!VECTOR_ELT_ORDER_BIG)
2155 altivec_expand_vec_perm_le (operands);

2160 (define_insn "*altivec_vperm_<mode>_uns_internal"
2161 [(set (match_operand:VM 0 "register_operand" "=v,?wo")
2162 (unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
2163 (match_operand:VM 2 "register_operand" "v,0")
2164 (match_operand:V16QI 3 "register_operand" "v,wo")]
2170 [(set_attr "type" "vecperm")
2171 (set_attr "length" "4")])

;; Generic vec_perm entry point; raw BYTES_BIG_ENDIAN check here (no
;; -maltivec=be override), unlike the altivec_vperm expanders above.
2173 (define_expand "vec_permv16qi"
2174 [(set (match_operand:V16QI 0 "register_operand" "")
2175 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "")
2176 (match_operand:V16QI 2 "register_operand" "")
2177 (match_operand:V16QI 3 "register_operand" "")]
2181 if (!BYTES_BIG_ENDIAN) {
2182 altivec_expand_vec_perm_le (operands);

;; Constant-selector permute: handled entirely by
;; altivec_expand_vec_perm_const, which may pick a cheaper instruction.
2187 (define_expand "vec_perm_constv16qi"
2188 [(match_operand:V16QI 0 "register_operand" "")
2189 (match_operand:V16QI 1 "register_operand" "")
2190 (match_operand:V16QI 2 "register_operand" "")
2191 (match_operand:V16QI 3 "" "")]
2194 if (altivec_expand_vec_perm_const (operands))

;; Reversed permute (ISA 3.0 xxpermr alternative visible below).
2200 (define_insn "*altivec_vpermr_<mode>_internal"
2201 [(set (match_operand:VM 0 "register_operand" "=v,?wo")
2202 (unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
2203 (match_operand:VM 2 "register_operand" "v,0")
2204 (match_operand:V16QI 3 "register_operand" "v,wo")]
2209 xxpermr %x0,%x1,%x3"
2210 [(set_attr "type" "vecperm")
2211 (set_attr "length" "4")])
;; Floating-point rounding, conversion and estimate instructions.
2213 (define_insn "altivec_vrfip" ; ceil
2214 [(set (match_operand:V4SF 0 "register_operand" "=v")
2215 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2219 [(set_attr "type" "vecfloat")])

;; vrfin: round to integral value, nearest.
2221 (define_insn "altivec_vrfin"
2222 [(set (match_operand:V4SF 0 "register_operand" "=v")
2223 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2227 [(set_attr "type" "vecfloat")])

2229 (define_insn "*altivec_vrfim" ; floor
2230 [(set (match_operand:V4SF 0 "register_operand" "=v")
2231 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2235 [(set_attr "type" "vecfloat")])

;; vcfux: convert unsigned fixed-point words to float, scaled by
;; 2**(-operand 2).
2237 (define_insn "altivec_vcfux"
2238 [(set (match_operand:V4SF 0 "register_operand" "=v")
2239 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2240 (match_operand:QI 2 "immediate_operand" "i")]
2244 [(set_attr "type" "vecfloat")])

;; vcfsx: signed counterpart of vcfux.
2246 (define_insn "altivec_vcfsx"
2247 [(set (match_operand:V4SF 0 "register_operand" "=v")
2248 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2249 (match_operand:QI 2 "immediate_operand" "i")]
2253 [(set_attr "type" "vecfloat")])

;; vctuxs: convert float to unsigned fixed-point with saturation;
;; sets VSCR (saturation status), hence the extra parallel set.
2255 (define_insn "altivec_vctuxs"
2256 [(set (match_operand:V4SI 0 "register_operand" "=v")
2257 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2258 (match_operand:QI 2 "immediate_operand" "i")]
2260 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2263 [(set_attr "type" "vecfloat")])

;; vctsxs: signed counterpart of vctuxs; also sets VSCR.
2265 (define_insn "altivec_vctsxs"
2266 [(set (match_operand:V4SI 0 "register_operand" "=v")
2267 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2268 (match_operand:QI 2 "immediate_operand" "i")]
2270 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2273 [(set_attr "type" "vecfloat")])

;; vlogefp: log2 estimate.
2275 (define_insn "altivec_vlogefp"
2276 [(set (match_operand:V4SF 0 "register_operand" "=v")
2277 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2281 [(set_attr "type" "vecfloat")])

;; vexptefp: 2**x estimate.
2283 (define_insn "altivec_vexptefp"
2284 [(set (match_operand:V4SF 0 "register_operand" "=v")
2285 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2289 [(set_attr "type" "vecfloat")])

;; vrsqrtefp: reciprocal square-root estimate.
2291 (define_insn "*altivec_vrsqrtefp"
2292 [(set (match_operand:V4SF 0 "register_operand" "=v")
2293 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2295 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2297 [(set_attr "type" "vecfloat")])

;; vrefp: reciprocal estimate.
2299 (define_insn "altivec_vrefp"
2300 [(set (match_operand:V4SF 0 "register_operand" "=v")
2301 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2303 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2305 [(set_attr "type" "vecfloat")])
;; copysign for V4SF: build a V4SI mask with only the sign bit (1<<31)
;; set in each word, then vector-select between operands 1 and 2 under
;; that mask (mask viewed as V4SF via gen_lowpart).
;; NOTE(review): some interior lines are elided in this copy.
2307 (define_expand "altivec_copysign_v4sf3"
2308 [(use (match_operand:V4SF 0 "register_operand" ""))
2309 (use (match_operand:V4SF 1 "register_operand" ""))
2310 (use (match_operand:V4SF 2 "register_operand" ""))]
2311 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2314 rtx mask = gen_reg_rtx (V4SImode);
2315 rtvec v = rtvec_alloc (4);
2316 unsigned HOST_WIDE_INT mask_val = ((unsigned HOST_WIDE_INT)1) << 31;
2318 RTVEC_ELT (v, 0) = GEN_INT (mask_val);
2319 RTVEC_ELT (v, 1) = GEN_INT (mask_val);
2320 RTVEC_ELT (v, 2) = GEN_INT (mask_val);
2321 RTVEC_ELT (v, 3) = GEN_INT (mask_val);
2323 emit_insn (gen_vec_initv4si (mask, gen_rtx_PARALLEL (V4SImode, v)));
2324 emit_insn (gen_vector_select_v4sf (operands[0], operands[1], operands[2],
2325 gen_lowpart (V4SFmode, mask)));

;; vsldoi: shift the concatenation of operands 1 and 2 left by the
;; immediate byte count in operand 3.
2329 (define_insn "altivec_vsldoi_<mode>"
2330 [(set (match_operand:VM 0 "register_operand" "=v")
2331 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
2332 (match_operand:VM 2 "register_operand" "v")
2333 (match_operand:QI 3 "immediate_operand" "i")]
2336 "vsldoi %0,%1,%2,%3"
2337 [(set_attr "type" "vecperm")])
;; Signed / pixel unpack patterns.  The non-"_direct" forms swap
;; high<->low mnemonics for little endian (vector element order), while
;; the "_direct" forms always emit the named instruction.
;; NOTE(review): some interior lines are elided in this copy.

;; Unpack high signed: vupkhs? on BE element order, vupkls? on LE.
2339 (define_insn "altivec_vupkhs<VU_char>"
2340 [(set (match_operand:VP 0 "register_operand" "=v")
2341 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2342 UNSPEC_VUNPACK_HI_SIGN))]
2345 if (VECTOR_ELT_ORDER_BIG)
2346 return "vupkhs<VU_char> %0,%1";
2348 return "vupkls<VU_char> %0,%1";
2350 [(set_attr "type" "vecperm")])

;; Unpack high signed, no endian adjustment.
2352 (define_insn "*altivec_vupkhs<VU_char>_direct"
2353 [(set (match_operand:VP 0 "register_operand" "=v")
2354 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2355 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2357 "vupkhs<VU_char> %0,%1"
2358 [(set_attr "type" "vecperm")])

;; Unpack low signed: vupkls? on BE element order, vupkhs? on LE.
2360 (define_insn "altivec_vupkls<VU_char>"
2361 [(set (match_operand:VP 0 "register_operand" "=v")
2362 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2363 UNSPEC_VUNPACK_LO_SIGN))]
2366 if (VECTOR_ELT_ORDER_BIG)
2367 return "vupkls<VU_char> %0,%1";
2369 return "vupkhs<VU_char> %0,%1";
2371 [(set_attr "type" "vecperm")])

;; Unpack low signed, no endian adjustment.
2373 (define_insn "*altivec_vupkls<VU_char>_direct"
2374 [(set (match_operand:VP 0 "register_operand" "=v")
2375 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2376 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
2378 "vupkls<VU_char> %0,%1"
2379 [(set_attr "type" "vecperm")])

;; Pixel unpack high (V8HI -> V4SI); hi/lo swapped on little endian.
2381 (define_insn "altivec_vupkhpx"
2382 [(set (match_operand:V4SI 0 "register_operand" "=v")
2383 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2387 if (VECTOR_ELT_ORDER_BIG)
2388 return "vupkhpx %0,%1";
2390 return "vupklpx %0,%1";
2392 [(set_attr "type" "vecperm")])

;; Pixel unpack low (V8HI -> V4SI); hi/lo swapped on little endian.
2394 (define_insn "altivec_vupklpx"
2395 [(set (match_operand:V4SI 0 "register_operand" "=v")
2396 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2400 if (VECTOR_ELT_ORDER_BIG)
2401 return "vupklpx %0,%1";
2403 return "vupkhpx %0,%1";
2405 [(set_attr "type" "vecperm")])
2407 ;; Compare vectors producing a vector result and a predicate, setting CR6 to
2408 ;; indicate a combined status
;; Each pattern below sets CR6 from the comparison (the dot-form vcmp*
;; instructions) and also writes the element-wise compare result into
;; operand 0.  NOTE(review): some interior lines are elided in this copy.

;; Integer equality, all VI2 modes.
2409 (define_insn "*altivec_vcmpequ<VI_char>_p"
2410 [(set (reg:CC CR6_REGNO)
2411 (unspec:CC [(eq:CC (match_operand:VI2 1 "register_operand" "v")
2412 (match_operand:VI2 2 "register_operand" "v"))]
2414 (set (match_operand:VI2 0 "register_operand" "=v")
2415 (eq:VI2 (match_dup 1)
2418 "vcmpequ<VI_char>. %0,%1,%2"
2419 [(set_attr "type" "veccmpfx")])

;; Signed greater-than.
2421 (define_insn "*altivec_vcmpgts<VI_char>_p"
2422 [(set (reg:CC CR6_REGNO)
2423 (unspec:CC [(gt:CC (match_operand:VI2 1 "register_operand" "v")
2424 (match_operand:VI2 2 "register_operand" "v"))]
2426 (set (match_operand:VI2 0 "register_operand" "=v")
2427 (gt:VI2 (match_dup 1)
2430 "vcmpgts<VI_char>. %0,%1,%2"
2431 [(set_attr "type" "veccmpfx")])

;; Unsigned greater-than.
2433 (define_insn "*altivec_vcmpgtu<VI_char>_p"
2434 [(set (reg:CC CR6_REGNO)
2435 (unspec:CC [(gtu:CC (match_operand:VI2 1 "register_operand" "v")
2436 (match_operand:VI2 2 "register_operand" "v"))]
2438 (set (match_operand:VI2 0 "register_operand" "=v")
2439 (gtu:VI2 (match_dup 1)
2442 "vcmpgtu<VI_char>. %0,%1,%2"
2443 [(set_attr "type" "veccmpfx")])

;; Float equality (AltiVec unit only).
2445 (define_insn "*altivec_vcmpeqfp_p"
2446 [(set (reg:CC CR6_REGNO)
2447 (unspec:CC [(eq:CC (match_operand:V4SF 1 "register_operand" "v")
2448 (match_operand:V4SF 2 "register_operand" "v"))]
2450 (set (match_operand:V4SF 0 "register_operand" "=v")
2451 (eq:V4SF (match_dup 1)
2453 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2454 "vcmpeqfp. %0,%1,%2"
2455 [(set_attr "type" "veccmp")])

;; Float greater-than.
2457 (define_insn "*altivec_vcmpgtfp_p"
2458 [(set (reg:CC CR6_REGNO)
2459 (unspec:CC [(gt:CC (match_operand:V4SF 1 "register_operand" "v")
2460 (match_operand:V4SF 2 "register_operand" "v"))]
2462 (set (match_operand:V4SF 0 "register_operand" "=v")
2463 (gt:V4SF (match_dup 1)
2465 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2466 "vcmpgtfp. %0,%1,%2"
2467 [(set_attr "type" "veccmp")])

;; Float greater-than-or-equal.
2469 (define_insn "*altivec_vcmpgefp_p"
2470 [(set (reg:CC CR6_REGNO)
2471 (unspec:CC [(ge:CC (match_operand:V4SF 1 "register_operand" "v")
2472 (match_operand:V4SF 2 "register_operand" "v"))]
2474 (set (match_operand:V4SF 0 "register_operand" "=v")
2475 (ge:V4SF (match_dup 1)
2477 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2478 "vcmpgefp. %0,%1,%2"
2479 [(set_attr "type" "veccmp")])

;; Bounds compare (vcmpbfp.); allowed on AltiVec or VSX units.
2481 (define_insn "altivec_vcmpbfp_p"
2482 [(set (reg:CC CR6_REGNO)
2483 (unspec:CC [(match_operand:V4SF 1 "register_operand" "v")
2484 (match_operand:V4SF 2 "register_operand" "v")]
2486 (set (match_operand:V4SF 0 "register_operand" "=v")
2487 (unspec:V4SF [(match_dup 1)
2490 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)"
2492 [(set_attr "type" "veccmp")])
;; VSCR access and AltiVec data-stream (software prefetch) insns.
;; The dst* patterns require operand 0 to be in Pmode since it is an
;; address base register.  NOTE(review): output templates for several
;; patterns are elided in this copy.

;; Move to VSCR (volatile -- VSCR is a global status register).
2494 (define_insn "altivec_mtvscr"
2495 [(set (reg:SI VSCR_REGNO)
2497 [(match_operand:V4SI 0 "register_operand" "v")] UNSPECV_MTVSCR))]
2500 [(set_attr "type" "vecsimple")])

;; Move from VSCR.
2502 (define_insn "altivec_mfvscr"
2503 [(set (match_operand:V8HI 0 "register_operand" "=v")
2504 (unspec_volatile:V8HI [(reg:SI VSCR_REGNO)] UNSPECV_MFVSCR))]
2507 [(set_attr "type" "vecsimple")])

;; Stop all data streams.
2509 (define_insn "altivec_dssall"
2510 [(unspec_volatile [(const_int 0)] UNSPECV_DSSALL)]
2513 [(set_attr "type" "vecsimple")])

;; Stop the data stream selected by the immediate tag.
2515 (define_insn "altivec_dss"
2516 [(unspec_volatile [(match_operand:QI 0 "immediate_operand" "i")]
2520 [(set_attr "type" "vecsimple")])

;; Data-stream touch: base address, control word, stream tag.
2522 (define_insn "altivec_dst"
2523 [(unspec [(match_operand 0 "register_operand" "b")
2524 (match_operand:SI 1 "register_operand" "r")
2525 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DST)]
2526 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2528 [(set_attr "type" "vecsimple")])

;; Transient variant of dst.
2530 (define_insn "altivec_dstt"
2531 [(unspec [(match_operand 0 "register_operand" "b")
2532 (match_operand:SI 1 "register_operand" "r")
2533 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTT)]
2534 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2536 [(set_attr "type" "vecsimple")])

;; Store-intent variant of dst.
2538 (define_insn "altivec_dstst"
2539 [(unspec [(match_operand 0 "register_operand" "b")
2540 (match_operand:SI 1 "register_operand" "r")
2541 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTST)]
2542 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2544 [(set_attr "type" "vecsimple")])

;; Transient store-intent variant of dst.
2546 (define_insn "altivec_dststt"
2547 [(unspec [(match_operand 0 "register_operand" "b")
2548 (match_operand:SI 1 "register_operand" "r")
2549 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTSTT)]
2550 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2552 [(set_attr "type" "vecsimple")])
;; lvsl/lvsr shift-mask generation.  On big-endian element order the
;; direct instruction is emitted; otherwise the raw mask is corrected
;; with a vperm using an identity 0..15 constant vector.
;; NOTE(review): some interior lines are elided in this copy.
2554 (define_expand "altivec_lvsl"
2555 [(use (match_operand:V16QI 0 "register_operand" ""))
2556 (use (match_operand:V16QI 1 "memory_operand" ""))]
2559 if (VECTOR_ELT_ORDER_BIG)
2560 emit_insn (gen_altivec_lvsl_direct (operands[0], operands[1]));
2564 rtx mask, perm[16], constv, vperm;
2565 mask = gen_reg_rtx (V16QImode);
2566 emit_insn (gen_altivec_lvsl_direct (mask, operands[1]));
2567 for (i = 0; i < 16; ++i)
2568 perm[i] = GEN_INT (i);
2569 constv = gen_rtx_CONST_VECTOR (V16QImode, gen_rtvec_v (16, perm));
2570 constv = force_reg (V16QImode, constv);
2571 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2573 emit_insn (gen_rtx_SET (operands[0], vperm));

;; Raw lvsl instruction, no endian correction.
2578 (define_insn "altivec_lvsl_direct"
2579 [(set (match_operand:V16QI 0 "register_operand" "=v")
2580 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2584 [(set_attr "type" "vecload")])

;; Mirror of altivec_lvsl for lvsr.
2586 (define_expand "altivec_lvsr"
2587 [(use (match_operand:V16QI 0 "register_operand" ""))
2588 (use (match_operand:V16QI 1 "memory_operand" ""))]
2591 if (VECTOR_ELT_ORDER_BIG)
2592 emit_insn (gen_altivec_lvsr_direct (operands[0], operands[1]));
2596 rtx mask, perm[16], constv, vperm;
2597 mask = gen_reg_rtx (V16QImode);
2598 emit_insn (gen_altivec_lvsr_direct (mask, operands[1]));
2599 for (i = 0; i < 16; ++i)
2600 perm[i] = GEN_INT (i);
2601 constv = gen_rtx_CONST_VECTOR (V16QImode, gen_rtvec_v (16, perm));
2602 constv = force_reg (V16QImode, constv);
2603 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2605 emit_insn (gen_rtx_SET (operands[0], vperm));

;; Raw lvsr instruction, no endian correction.
2610 (define_insn "altivec_lvsr_direct"
2611 [(set (match_operand:V16QI 0 "register_operand" "=v")
2612 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2616 [(set_attr "type" "vecload")])

;; Build the realignment mask for an unaligned vector load: negate the
;; address, then lvsr on the negated address.
2618 (define_expand "build_vector_mask_for_load"
2619 [(set (match_operand:V16QI 0 "register_operand" "")
2620 (unspec:V16QI [(match_operand 1 "memory_operand" "")] UNSPEC_LVSR))]
2627 gcc_assert (GET_CODE (operands[1]) == MEM);
2629 addr = XEXP (operands[1], 0);
2630 temp = gen_reg_rtx (GET_MODE (addr));
2631 emit_insn (gen_rtx_SET (temp, gen_rtx_NEG (GET_MODE (addr), addr)));
2632 emit_insn (gen_altivec_lvsr (operands[0],
2633 replace_equiv_address (operands[1], temp)));
2637 ;; Parallel some of the LVE* and STV*'s with unspecs because some have
2638 ;; identical rtl but different instructions-- and gcc gets confused.
;; NOTE(review): some interior lines are elided in this copy.

;; Element load; on LE with BE element order, defer to the helper that
;; inserts the required permute.
2640 (define_expand "altivec_lve<VI_char>x"
2642 [(set (match_operand:VI 0 "register_operand" "=v")
2643 (match_operand:VI 1 "memory_operand" "Z"))
2644 (unspec [(const_int 0)] UNSPEC_LVE)])]
2647 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2649 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_LVE);

2654 (define_insn "*altivec_lve<VI_char>x_internal"
2656 [(set (match_operand:VI 0 "register_operand" "=v")
2657 (match_operand:VI 1 "memory_operand" "Z"))
2658 (unspec [(const_int 0)] UNSPEC_LVE)])]
2660 "lve<VI_char>x %0,%y1"
2661 [(set_attr "type" "vecload")])

;; Float element load (lvewx; template elided in this copy).
2663 (define_insn "*altivec_lvesfx"
2665 [(set (match_operand:V4SF 0 "register_operand" "=v")
2666 (match_operand:V4SF 1 "memory_operand" "Z"))
2667 (unspec [(const_int 0)] UNSPEC_LVE)])]
2670 [(set_attr "type" "vecload")])

;; LRU-hint load, endian-corrected like the lve expand above.
2672 (define_expand "altivec_lvxl_<mode>"
2674 [(set (match_operand:VM2 0 "register_operand" "=v")
2675 (match_operand:VM2 1 "memory_operand" "Z"))
2676 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
2679 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2681 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_SET_VSCR);

2686 (define_insn "*altivec_lvxl_<mode>_internal"
2688 [(set (match_operand:VM2 0 "register_operand" "=v")
2689 (match_operand:VM2 1 "memory_operand" "Z"))
2690 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
2693 [(set_attr "type" "vecload")])

2695 ; This version of lvx is used only in cases where we need to force an lvx
2696 ; over any other load, and we don't care about losing CSE opportunities.
2697 ; Its primary use is for prologue register saves.
2698 (define_insn "altivec_lvx_<mode>_internal"
2700 [(set (match_operand:VM2 0 "register_operand" "=v")
2701 (match_operand:VM2 1 "memory_operand" "Z"))
2702 (unspec [(const_int 0)] UNSPEC_LVX)])]
2705 [(set_attr "type" "vecload")])

2707 ; The next two patterns embody what lvx should usually look like.
;; 64-bit: base+index form (the AND presumably masks the low bits, as
;; lvx ignores them -- masking constant elided in this copy).
2708 (define_insn "altivec_lvx_<mode>_2op"
2709 [(set (match_operand:VM2 0 "register_operand" "=v")
2710 (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b")
2711 (match_operand:DI 2 "register_operand" "r"))
2713 "TARGET_ALTIVEC && TARGET_64BIT"
2715 [(set_attr "type" "vecload")])

;; 64-bit: single base register form.
2717 (define_insn "altivec_lvx_<mode>_1op"
2718 [(set (match_operand:VM2 0 "register_operand" "=v")
2719 (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r")
2721 "TARGET_ALTIVEC && TARGET_64BIT"
2723 [(set_attr "type" "vecload")])

2725 ; 32-bit versions of the above.
2726 (define_insn "altivec_lvx_<mode>_2op_si"
2727 [(set (match_operand:VM2 0 "register_operand" "=v")
2728 (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b")
2729 (match_operand:SI 2 "register_operand" "r"))
2731 "TARGET_ALTIVEC && TARGET_32BIT"
2733 [(set_attr "type" "vecload")])

2735 (define_insn "altivec_lvx_<mode>_1op_si"
2736 [(set (match_operand:VM2 0 "register_operand" "=v")
2737 (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r")
2739 "TARGET_ALTIVEC && TARGET_32BIT"
2741 [(set_attr "type" "vecload")])
2743 ; This version of stvx is used only in cases where we need to force an stvx
2744 ; over any other store, and we don't care about losing CSE opportunities.
2745 ; Its primary use is for epilogue register restores.
;; NOTE(review): some interior lines are elided in this copy.
2746 (define_insn "altivec_stvx_<mode>_internal"
2748 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2749 (match_operand:VM2 1 "register_operand" "v"))
2750 (unspec [(const_int 0)] UNSPEC_STVX)])]
2753 [(set_attr "type" "vecstore")])

2755 ; The next two patterns embody what stvx should usually look like.
;; 64-bit base+index store (stvx ignores the low address bits, hence the
;; AND -- masking constant elided in this copy).
2756 (define_insn "altivec_stvx_<mode>_2op"
2757 [(set (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b")
2758 (match_operand:DI 2 "register_operand" "r"))
2760 (match_operand:VM2 0 "register_operand" "v"))]
2761 "TARGET_ALTIVEC && TARGET_64BIT"
2763 [(set_attr "type" "vecstore")])

;; 64-bit single base register store.
2765 (define_insn "altivec_stvx_<mode>_1op"
2766 [(set (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r")
2768 (match_operand:VM2 0 "register_operand" "v"))]
2769 "TARGET_ALTIVEC && TARGET_64BIT"
2771 [(set_attr "type" "vecstore")])

2773 ; 32-bit versions of the above.
2774 (define_insn "altivec_stvx_<mode>_2op_si"
2775 [(set (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b")
2776 (match_operand:SI 2 "register_operand" "r"))
2778 (match_operand:VM2 0 "register_operand" "v"))]
2779 "TARGET_ALTIVEC && TARGET_32BIT"
2781 [(set_attr "type" "vecstore")])

2783 (define_insn "altivec_stvx_<mode>_1op_si"
2784 [(set (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r")
2786 (match_operand:VM2 0 "register_operand" "v"))]
2787 "TARGET_ALTIVEC && TARGET_32BIT"
2789 [(set_attr "type" "vecstore")])

;; LRU-hint store; on LE with BE element order defer to the helper that
;; inserts the required permute.
2791 (define_expand "altivec_stvxl_<mode>"
2793 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2794 (match_operand:VM2 1 "register_operand" "v"))
2795 (unspec [(const_int 0)] UNSPEC_STVXL)])]
2798 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2800 altivec_expand_stvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVXL);

2805 (define_insn "*altivec_stvxl_<mode>_internal"
2807 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2808 (match_operand:VM2 1 "register_operand" "v"))
2809 (unspec [(const_int 0)] UNSPEC_STVXL)])]
2812 [(set_attr "type" "vecstore")])

;; Element store, endian-corrected via altivec_expand_stvex_be.
2814 (define_expand "altivec_stve<VI_char>x"
2815 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
2816 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
2819 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2821 altivec_expand_stvex_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVE);

2826 (define_insn "*altivec_stve<VI_char>x_internal"
2827 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
2828 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
2830 "stve<VI_char>x %1,%y0"
2831 [(set_attr "type" "vecstore")])

;; Float element store (template elided in this copy).
2833 (define_insn "*altivec_stvesfx"
2834 [(set (match_operand:SF 0 "memory_operand" "=Z")
2835 (unspec:SF [(match_operand:V4SF 1 "register_operand" "v")] UNSPEC_STVE))]
2838 [(set_attr "type" "vecstore")])
;; Word-to-double conversion expands.  Each picks two of the four words
;; of a V4SI/V4SF operand, positions them with xxsldwi shifts as needed
;; for the endianness in effect, then converts with xvcvsxwdp/xvcvspdp
;; (via the VS_sxwsp attribute) or xvcvuxwdp for the unsigned forms.
;; NOTE(review): some interior lines are elided in this copy.

2841 ;; signed int/float to double convert words 0 and 2
2842 (define_expand "doublee<mode>2"
2843 [(set (match_operand:V2DF 0 "register_operand" "=v")
2844 (match_operand:VSX_W 1 "register_operand" "v"))]
2847 machine_mode op_mode = GET_MODE (operands[1]);
2849 if (VECTOR_ELT_ORDER_BIG)
2851 /* Big endian word numbering for words in operand is 0 1 2 3.
2852 Input words 0 and 2 are where they need to be. */
2853 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], operands[1]));
2857 /* Little endian word numbering for operand is 3 2 1 0.
2858 take (operand[1] operand[1]) and shift left one word
2859 3 2 1 0 3 2 1 0 => 2 1 0 3
2860 Input words 2 and 0 are now where they need to be for the
2863 rtx rtx_val = GEN_INT (1);
2865 rtx_tmp = gen_reg_rtx (op_mode);
2866 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
2867 operands[1], rtx_val));
2868 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
2872 [(set_attr "type" "veccomplex")])

2874 ;; Generate unsdoublee
2875 ;; unsigned int to double convert words 0 and 2
2876 (define_expand "unsdoubleev4si2"
2877 [(set (match_operand:V2DF 0 "register_operand" "=v")
2878 (match_operand:V4SI 1 "register_operand" "v"))]
2881 if (VECTOR_ELT_ORDER_BIG)
2883 /* Big endian word numbering for words in operand is 0 1 2 3.
2884 Input words 0 and 2 are where they need to be. */
2885 emit_insn (gen_vsx_xvcvuxwdp (operands[0], operands[1]));
2889 /* Little endian word numbering for operand is 3 2 1 0.
2890 take (operand[1] operand[1]) and shift left one word
2891 3 2 1 0 3 2 1 0 => 2 1 0 3
2892 Input words 2 and 0 are now where they need to be for the
2895 rtx rtx_val = GEN_INT (1);
2897 rtx_tmp = gen_reg_rtx (V4SImode);
2898 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
2899 operands[1], rtx_val));
2900 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
2904 [(set_attr "type" "veccomplex")])

2906 ;; Generate doubleov
2907 ;; signed int/float to double convert words 1 and 3
2908 (define_expand "doubleo<mode>2"
2909 [(set (match_operand:V2DF 0 "register_operand" "=v")
2910 (match_operand:VSX_W 1 "register_operand" "v"))]
2913 machine_mode op_mode = GET_MODE (operands[1]);
2915 if (VECTOR_ELT_ORDER_BIG)
2917 /* Big endian word numbering for words in operand is 0 1 2 3.
2918 take (operand[1] operand[1]) and shift left one word
2919 0 1 2 3 0 1 2 3 => 1 2 3 0
2920 Input words 1 and 3 are now where they need to be for the
2923 rtx rtx_val = GEN_INT (1);
2925 rtx_tmp = gen_reg_rtx (op_mode);
2926 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
2927 operands[1], rtx_val));
2928 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
2932 /* Little endian word numbering for operand is 3 2 1 0.
2933 Input words 3 and 1 are where they need to be. */
2934 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], operands[1]));
2938 [(set_attr "type" "veccomplex")])

2940 ;; Generate unsdoubleov
2941 ;; unsigned int to double convert words 1 and 3
2942 (define_expand "unsdoubleov4si2"
2943 [(set (match_operand:V2DF 0 "register_operand" "=v")
2944 (match_operand:V4SI 1 "register_operand" "v"))]
2947 if (VECTOR_ELT_ORDER_BIG)
2949 /* Big endian word numbering for words in operand is 0 1 2 3.
2950 take (operand[1] operand[1]) and shift left one word
2951 0 1 2 3 0 1 2 3 => 1 2 3 0
2952 Input words 1 and 3 are now where they need to be for the
2955 rtx rtx_val = GEN_INT (1);
2957 rtx_tmp = gen_reg_rtx (V4SImode);
2958 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
2959 operands[1], rtx_val));
2960 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
2964 /* Want to convert the words 1 and 3.
2965 Little endian word numbering for operand is 3 2 1 0.
2966 Input words 3 and 1 are where they need to be. */
2967 emit_insn (gen_vsx_xvcvuxwdp (operands[0], operands[1]))
2971 [(set_attr "type" "veccomplex")])

2973 ;; Generate doublehv
2974 ;; signed int/float to double convert words 0 and 1
2975 (define_expand "doubleh<mode>2"
2976 [(set (match_operand:V2DF 0 "register_operand" "=v")
2977 (match_operand:VSX_W 1 "register_operand" "v"))]
2983 machine_mode op_mode = GET_MODE (operands[1]);
2984 rtx_tmp = gen_reg_rtx (op_mode);
2986 if (VECTOR_ELT_ORDER_BIG)
2988 /* Big endian word numbering for words in operand is 0 1 2 3.
2989 Shift operand left one word, rtx_tmp word order is now 1 2 3 0.
2990 take (rts_tmp operand[1]) and shift left three words
2991 1 2 3 0 0 1 2 3 => 0 0 1 2
2992 Input words 0 and 1 are now where they need to be for the
2994 rtx_val = GEN_INT (1);
2995 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
2996 operands[1], rtx_val));
2998 rtx_val = GEN_INT (3);
2999 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, rtx_tmp,
3000 operands[1], rtx_val));
3001 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3005 /* Little endian word numbering for operand is 3 2 1 0.
3006 Shift operand left three words, rtx_tmp word order is now 0 3 2 1.
3007 take (operand[1] rts_tmp) and shift left two words
3008 3 2 1 0 0 3 2 1 => 1 0 0 3
3009 Input words 0 and 1 are now where they need to be for the
3011 rtx_val = GEN_INT (3);
3012 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3013 operands[1], rtx_val));
3015 rtx_val = GEN_INT (2);
3016 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3018 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3022 [(set_attr "type" "veccomplex")])

3024 ;; Generate unsdoublehv
3025 ;; unsigned int to double convert words 0 and 1
3026 (define_expand "unsdoublehv4si2"
3027 [(set (match_operand:V2DF 0 "register_operand" "=v")
3028 (match_operand:V4SI 1 "register_operand" "v"))]
3031 rtx rtx_tmp = gen_reg_rtx (V4SImode);
3032 rtx rtx_val = GEN_INT (12);
3034 if (VECTOR_ELT_ORDER_BIG)
3036 /* Big endian word numbering for words in operand is 0 1 2 3.
3037 Shift operand left one word, rtx_tmp word order is now 1 2 3 0.
3038 take (rts_tmp operand[1]) and shift left three words
3039 1 2 3 0 0 1 2 3 => 0 0 1 2
3040 Input words 0 and 1 are now where they need to be for the
3042 rtx_val = GEN_INT (1);
3043 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3044 operands[1], rtx_val));
3046 rtx_val = GEN_INT (3);
3047 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, rtx_tmp,
3048 operands[1], rtx_val));
3049 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3053 /* Little endian word numbering for operand is 3 2 1 0.
3054 Shift operand left three words, rtx_tmp word order is now 0 3 2 1.
3055 take (operand[1] rts_tmp) and shift left two words
3056 3 2 1 0 0 3 2 1 => 1 0 0 3
3057 Input words 1 and 0 are now where they need to be for the
3059 rtx_val = GEN_INT (3);
3061 rtx_tmp = gen_reg_rtx (V4SImode);
3062 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3063 operands[1], rtx_val));
3065 rtx_val = GEN_INT (2);
3066 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3068 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3072 [(set_attr "type" "veccomplex")])

3074 ;; Generate doublelv
3075 ;; signed int/float to double convert words 2 and 3
3076 (define_expand "doublel<mode>2"
3077 [(set (match_operand:V2DF 0 "register_operand" "=v")
3078 (match_operand:VSX_W 1 "register_operand" "v"))]
3082 rtx rtx_val = GEN_INT (3);
3084 machine_mode op_mode = GET_MODE (operands[1]);
3085 rtx_tmp = gen_reg_rtx (op_mode);
3087 if (VECTOR_ELT_ORDER_BIG)
3089 /* Big endian word numbering for operand is 0 1 2 3.
3090 Shift operand left three words, rtx_tmp word order is now 3 0 1 2.
3091 take (operand[1] rtx_tmp) and shift left two words
3092 0 1 2 3 3 0 1 2 => 2 3 3 0
3093 now use convert instruction to convert word 2 and 3 in the
3095 rtx_val = GEN_INT (3);
3096 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3097 operands[1], rtx_val));
3099 rtx_val = GEN_INT (2);
3100 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3102 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3106 /* Little endian word numbering for operand is 3 2 1 0.
3107 Shift operand left one word, rtx_tmp word order is now 2 1 0 3.
3108 take (rtx_tmp operand[1]) and shift left three words
3109 2 1 0 3 3 2 1 0 => 3 3 2 1
3110 now use convert instruction to convert word 3 and 2 in the
3112 rtx_val = GEN_INT (1);
3113 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3114 operands[1], rtx_val));
3116 rtx_val = GEN_INT (3);
3117 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, rtx_tmp,
3118 operands[1], rtx_val));
3119 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3123 [(set_attr "type" "veccomplex")])

3125 ;; Generate unsdoublelv
3126 ;; unsigned int to double convert convert 2 and 3
3127 (define_expand "unsdoublelv4si2"
3128 [(set (match_operand:V2DF 0 "register_operand" "=v")
3129 (match_operand:V4SI 1 "register_operand" "v"))]
3132 rtx rtx_tmp = gen_reg_rtx (V4SImode);
3133 rtx rtx_val = GEN_INT (12);
3135 if (VECTOR_ELT_ORDER_BIG)
3137 /* Big endian word numbering for operand is 0 1 2 3.
3138 Shift operand left three words, rtx_tmp word order is now 3 0 1 2.
3139 take (operand[1] rtx_tmp) and shift left two words
3140 0 1 2 3 3 0 1 2 => 2 3 3 0
3141 now use convert instruction to convert word 2 and 3 in the
3143 rtx_val = GEN_INT (3);
3144 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3145 operands[1], rtx_val));
3147 rtx_val = GEN_INT (2);
3148 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3150 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3154 /* Little endian word numbering for operand is 3 2 1 0.
3155 Shift operand left one word, rtx_tmp word order is now 2 1 0 3.
3156 take (rtx_tmp operand[1]) and shift left three words
3157 2 1 0 3 3 2 1 0 => 3 3 2 1
3158 now use convert instruction to convert word 3 and 2 in the
3160 rtx_val = GEN_INT (1);
3161 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp,
3162 operands[1], operands[1], rtx_val));
3164 rtx_val = GEN_INT (3);
3165 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, rtx_tmp,
3166 operands[1], rtx_val));
3167 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3171 [(set_attr "type" "veccomplex")])
3174 ;; xxlxor/vxor SCRATCH0,SCRATCH0,SCRATCH0
3175 ;; vsubu?m SCRATCH2,SCRATCH1,%1
3176 ;; vmaxs? %0,%1,SCRATCH2"
;; Integer abs: 0 - x via modular subtract, then smax(x, -x).
;; NOTE(review): some interior lines are elided in this copy.
3177 (define_expand "abs<mode>2"
3178 [(set (match_dup 2) (match_dup 3))
3180 (minus:VI2 (match_dup 2)
3181 (match_operand:VI2 1 "register_operand" "v")))
3182 (set (match_operand:VI2 0 "register_operand" "=v")
3183 (smax:VI2 (match_dup 1) (match_dup 4)))]
3186 int i, n_elt = GET_MODE_NUNITS (<MODE>mode);
3187 rtvec v = rtvec_alloc (n_elt);
3189 /* Create an all 0 constant. */
3190 for (i = 0; i < n_elt; ++i)
3191 RTVEC_ELT (v, i) = const0_rtx;
3193 operands[2] = gen_reg_rtx (<MODE>mode);
3194 operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, v);
3195 operands[4] = gen_reg_rtx (<MODE>mode);

3199 ;; vspltisw SCRATCH1,0
3200 ;; vsubu?m SCRATCH2,SCRATCH1,%1
3201 ;; vmins? %0,%1,SCRATCH2"
;; Negated abs: same construction but final smin(x, -x).
3202 (define_expand "nabs<mode>2"
3203 [(set (match_dup 2) (match_dup 3))
3205 (minus:VI2 (match_dup 2)
3206 (match_operand:VI2 1 "register_operand" "v")))
3207 (set (match_operand:VI2 0 "register_operand" "=v")
3208 (smin:VI2 (match_dup 1) (match_dup 4)))]
3212 int n_elt = GET_MODE_NUNITS (<MODE>mode);
3214 rtvec v = rtvec_alloc (n_elt);
3216 /* Create an all 0 constant. */
3217 for (i = 0; i < n_elt; ++i)
3218 RTVEC_ELT (v, i) = const0_rtx;
3220 operands[2] = gen_reg_rtx (<MODE>mode);
3221 operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, v);
3222 operands[4] = gen_reg_rtx (<MODE>mode);

3226 ;; vspltisw SCRATCH1,-1
3227 ;; vslw SCRATCH2,SCRATCH1,SCRATCH1
3228 ;; vandc %0,%1,SCRATCH2
;; Float abs: build the 0x80000000 sign mask (splat -1, shift left),
;; then clear the sign bit with andc.
3229 (define_expand "altivec_absv4sf2"
3231 (vec_duplicate:V4SI (const_int -1)))
3233 (ashift:V4SI (match_dup 2) (match_dup 2)))
3234 (set (match_operand:V4SF 0 "register_operand" "=v")
3235 (and:V4SF (not:V4SF (subreg:V4SF (match_dup 3) 0))
3236 (match_operand:V4SF 1 "register_operand" "v")))]
3239 operands[2] = gen_reg_rtx (V4SImode);
3240 operands[3] = gen_reg_rtx (V4SImode);

3244 ;; vspltis? SCRATCH0,0
3245 ;; vsubs?s SCRATCH2,SCRATCH1,%1
3246 ;; vmaxs? %0,%1,SCRATCH2"
;; Saturating abs: saturating subtract (which also sets VSCR, hence the
;; parallel SET of VSCR_REGNO), then smax.
3247 (define_expand "altivec_abss_<mode>"
3248 [(set (match_dup 2) (vec_duplicate:VI (const_int 0)))
3249 (parallel [(set (match_dup 3)
3250 (unspec:VI [(match_dup 2)
3251 (match_operand:VI 1 "register_operand" "v")]
3253 (set (reg:SI VSCR_REGNO)
3254 (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))])
3255 (set (match_operand:VI 0 "register_operand" "=v")
3256 (smax:VI (match_dup 1) (match_dup 3)))]
3259 operands[2] = gen_reg_rtx (GET_MODE (operands[0]));
3260 operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
;; Horizontal add of a short-int vector: vsum4s?s partial sums into a
;; V4SI, vsumsws folds to one word, then extract the scalar from the
;; endian-dependent element.  NOTE(review): some interior lines are
;; elided in this copy.
3263 (define_expand "reduc_plus_scal_<mode>"
3264 [(set (match_operand:<VI_scalar> 0 "register_operand" "=v")
3265 (unspec:VIshort [(match_operand:VIshort 1 "register_operand" "v")]
3266 UNSPEC_REDUC_PLUS))]
3269 rtx vzero = gen_reg_rtx (V4SImode);
3270 rtx vtmp1 = gen_reg_rtx (V4SImode);
3271 rtx vtmp2 = gen_reg_rtx (<MODE>mode);
3272 rtx dest = gen_lowpart (V4SImode, vtmp2);
3273 int elt = VECTOR_ELT_ORDER_BIG ? GET_MODE_NUNITS (<MODE>mode) - 1 : 0;
3275 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
3276 emit_insn (gen_altivec_vsum4s<VI_char>s (vtmp1, operands[1], vzero));
3277 emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
3278 rs6000_expand_vector_extract (operands[0], vtmp2, GEN_INT (elt));

;; ISA 3.0 native vector negate.
3282 (define_insn "*p9_neg<mode>2"
3283 [(set (match_operand:VNEG 0 "altivec_register_operand" "=v")
3284 (neg:VNEG (match_operand:VNEG 1 "altivec_register_operand" "v")))]
3286 "vneg<VI_char> %0,%1"
3287 [(set_attr "type" "vecsimple")])

;; Generic negate: use the P9 vneg insn when available for V4SI/V2DI,
;; otherwise expand as 0 - x.
3289 (define_expand "neg<mode>2"
3290 [(set (match_operand:VI2 0 "register_operand" "")
3291 (neg:VI2 (match_operand:VI2 1 "register_operand" "")))]
3294 if (!TARGET_P9_VECTOR || (<MODE>mode != V4SImode && <MODE>mode != V2DImode))
3298 vzero = gen_reg_rtx (GET_MODE (operands[0]));
3299 emit_move_insn (vzero, CONST0_RTX (<MODE>mode));
3300 emit_insn (gen_sub<mode>3 (operands[0], vzero, operands[1]));
;; Dot-product and widening-sum expands, all lowered to the multiply-
;; sum instructions (vmsum*).  The widening sums multiply by a splat of
;; ones.  NOTE(review): some interior lines are elided in this copy.

;; Unsigned dot product accumulating into operand 3.
3305 (define_expand "udot_prod<mode>"
3306 [(set (match_operand:V4SI 0 "register_operand" "=v")
3307 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
3308 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
3309 (match_operand:VIshort 2 "register_operand" "v")]
3314 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], operands[2], operands[3]));

;; Signed halfword dot product via vmsumshm.
3318 (define_expand "sdot_prodv8hi"
3319 [(set (match_operand:V4SI 0 "register_operand" "=v")
3320 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
3321 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3322 (match_operand:V8HI 2 "register_operand" "v")]
3327 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], operands[2], operands[3]));

;; Unsigned widening sum: multiply-sum against a vector of ones.
3331 (define_expand "widen_usum<mode>3"
3332 [(set (match_operand:V4SI 0 "register_operand" "=v")
3333 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
3334 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")]
3339 rtx vones = gen_reg_rtx (GET_MODE (operands[1]));
3341 emit_insn (gen_altivec_vspltis<VI_char> (vones, const1_rtx));
3342 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], vones, operands[2]));

;; Signed widening sum, byte elements.
3346 (define_expand "widen_ssumv16qi3"
3347 [(set (match_operand:V4SI 0 "register_operand" "=v")
3348 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
3349 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")]
3354 rtx vones = gen_reg_rtx (V16QImode);
3356 emit_insn (gen_altivec_vspltisb (vones, const1_rtx));
3357 emit_insn (gen_altivec_vmsummbm (operands[0], operands[1], vones, operands[2]));

;; Signed widening sum, halfword elements.
3361 (define_expand "widen_ssumv8hi3"
3362 [(set (match_operand:V4SI 0 "register_operand" "=v")
3363 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
3364 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
3369 rtx vones = gen_reg_rtx (V8HImode);
3371 emit_insn (gen_altivec_vspltish (vones, const1_rtx));
3372 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], vones, operands[2]));
;; Signed unpack high/low: expose the sign-extending vupkh*/vupkl* family
;; to the middle end via the *_DIRECT unspecs.
3376 (define_expand "vec_unpacks_hi_<VP_small_lc>"
3377 [(set (match_operand:VP 0 "register_operand" "=v")
3378 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3379 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
3383 (define_expand "vec_unpacks_lo_<VP_small_lc>"
3384 [(set (match_operand:VP 0 "register_operand" "=v")
3385 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3386 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
;; vperm helpers used by the unsigned-unpack expanders below; two register
;; alternatives: Altivec ("v") and VSX ("wo") registers.
3390 (define_insn "vperm_v8hiv4si"
3391 [(set (match_operand:V4SI 0 "register_operand" "=v,?wo")
3392 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v,wo")
3393 (match_operand:V4SI 2 "register_operand" "v,0")
3394 (match_operand:V16QI 3 "register_operand" "v,wo")]
3400 [(set_attr "type" "vecperm")
3401 (set_attr "length" "4")])
3403 (define_insn "vperm_v16qiv8hi"
3404 [(set (match_operand:V8HI 0 "register_operand" "=v,?wo")
3405 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v,wo")
3406 (match_operand:V8HI 2 "register_operand" "v,0")
3407 (match_operand:V16QI 3 "register_operand" "v,wo")]
3413 [(set_attr "type" "vecperm")
3414 (set_attr "length" "4")])
;; Unsigned unpack (zero-extend) expanders.  Altivec has no unsigned
;; vupk* instructions, so each builds a vperm mask that interleaves input
;; bytes with bytes taken from a zero vector (permute indices 16/17 select
;; the second vperm source, which is zeroed via vspltis*).  The ?: picks
;; big- vs little-endian byte numbering.
;; NOTE(review): this extract omits some original lines of each pattern.
3417 (define_expand "vec_unpacku_hi_v16qi"
3418 [(set (match_operand:V8HI 0 "register_operand" "=v")
3419 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
3424 rtx vzero = gen_reg_rtx (V8HImode);
3425 rtx mask = gen_reg_rtx (V16QImode);
3426 rtvec v = rtvec_alloc (16);
3427 bool be = BYTES_BIG_ENDIAN;
3429 emit_insn (gen_altivec_vspltish (vzero, const0_rtx));
3431 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 7);
3432 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 0 : 16);
3433 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 16 : 6);
3434 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 1 : 16);
3435 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 5);
3436 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 2 : 16);
3437 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 16 : 4);
3438 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 3 : 16);
3439 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 3);
3440 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 4 : 16);
3441 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 2);
3442 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 5 : 16);
3443 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 1);
3444 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 6 : 16);
3445 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 : 0);
3446 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 7 : 16);
3448 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3449 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
;; Same technique for V8HI -> V4SI: each 16-bit element pairs with two
;; zero bytes (indices 16/17) to form a zero-extended 32-bit element.
3453 (define_expand "vec_unpacku_hi_v8hi"
3454 [(set (match_operand:V4SI 0 "register_operand" "=v")
3455 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
3460 rtx vzero = gen_reg_rtx (V4SImode);
3461 rtx mask = gen_reg_rtx (V16QImode);
3462 rtvec v = rtvec_alloc (16);
3463 bool be = BYTES_BIG_ENDIAN;
3465 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
3467 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 7);
3468 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 17 : 6);
3469 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 0 : 17);
3470 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 1 : 16);
3471 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 5);
3472 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 17 : 4);
3473 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 2 : 17);
3474 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 3 : 16);
3475 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 3);
3476 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 17 : 2);
3477 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 4 : 17);
3478 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 5 : 16);
3479 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 1);
3480 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 : 0);
3481 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 6 : 17);
3482 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 7 : 16);
3484 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3485 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
;; Low-half variants: identical scheme, but the mask selects input bytes
;; 8..15 instead of 0..7.
3489 (define_expand "vec_unpacku_lo_v16qi"
3490 [(set (match_operand:V8HI 0 "register_operand" "=v")
3491 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
3496 rtx vzero = gen_reg_rtx (V8HImode);
3497 rtx mask = gen_reg_rtx (V16QImode);
3498 rtvec v = rtvec_alloc (16);
3499 bool be = BYTES_BIG_ENDIAN;
3501 emit_insn (gen_altivec_vspltish (vzero, const0_rtx));
3503 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
3504 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 8 : 16);
3505 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 16 : 14);
3506 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 9 : 16);
3507 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
3508 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 10 : 16);
3509 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 16 : 12);
3510 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
3511 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
3512 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 12 : 16);
3513 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 10);
3514 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
3515 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 9);
3516 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 14 : 16);
3517 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 : 8);
3518 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);
3520 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3521 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
3525 (define_expand "vec_unpacku_lo_v8hi"
3526 [(set (match_operand:V4SI 0 "register_operand" "=v")
3527 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
3532 rtx vzero = gen_reg_rtx (V4SImode);
3533 rtx mask = gen_reg_rtx (V16QImode);
3534 rtvec v = rtvec_alloc (16);
3535 bool be = BYTES_BIG_ENDIAN;
3537 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
3539 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
3540 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 17 : 14);
3541 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 8 : 17);
3542 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 9 : 16);
3543 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
3544 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 17 : 12);
3545 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 10 : 17);
3546 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
3547 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
3548 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 17 : 10);
3549 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 12 : 17);
3550 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
3551 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 9);
3552 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 : 8);
3553 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 14 : 17);
3554 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);
3556 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3557 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
;; Widening multiply expanders.  Each computes the even- and odd-element
;; products (vmule*/vmulo*) and merges them with vmrgh*/vmrgl* to produce
;; the high or low half of the double-width result.  On little endian the
;; even/odd instruction roles and merge operand order are swapped so the
;; element ordering comes out the same as on big endian.
3561 (define_expand "vec_widen_umult_hi_v16qi"
3562 [(set (match_operand:V8HI 0 "register_operand" "=v")
3563 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3564 (match_operand:V16QI 2 "register_operand" "v")]
3569 rtx ve = gen_reg_rtx (V8HImode);
3570 rtx vo = gen_reg_rtx (V8HImode);
3572 if (BYTES_BIG_ENDIAN)
3574 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3575 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3576 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
3580 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3581 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3582 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
3587 (define_expand "vec_widen_umult_lo_v16qi"
3588 [(set (match_operand:V8HI 0 "register_operand" "=v")
3589 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3590 (match_operand:V16QI 2 "register_operand" "v")]
3595 rtx ve = gen_reg_rtx (V8HImode);
3596 rtx vo = gen_reg_rtx (V8HImode);
3598 if (BYTES_BIG_ENDIAN)
3600 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3601 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3602 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
3606 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3607 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3608 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
;; Signed byte variants (vmulesb/vmulosb).
3613 (define_expand "vec_widen_smult_hi_v16qi"
3614 [(set (match_operand:V8HI 0 "register_operand" "=v")
3615 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3616 (match_operand:V16QI 2 "register_operand" "v")]
3621 rtx ve = gen_reg_rtx (V8HImode);
3622 rtx vo = gen_reg_rtx (V8HImode);
3624 if (BYTES_BIG_ENDIAN)
3626 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3627 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3628 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
3632 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3633 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3634 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
3639 (define_expand "vec_widen_smult_lo_v16qi"
3640 [(set (match_operand:V8HI 0 "register_operand" "=v")
3641 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3642 (match_operand:V16QI 2 "register_operand" "v")]
3647 rtx ve = gen_reg_rtx (V8HImode);
3648 rtx vo = gen_reg_rtx (V8HImode);
3650 if (BYTES_BIG_ENDIAN)
3652 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3653 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3654 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
3658 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3659 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3660 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
;; Halfword variants: V8HI inputs, V4SI outputs (vmule*h/vmulo*h with
;; word merges vmrghw/vmrglw).
3665 (define_expand "vec_widen_umult_hi_v8hi"
3666 [(set (match_operand:V4SI 0 "register_operand" "=v")
3667 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3668 (match_operand:V8HI 2 "register_operand" "v")]
3673 rtx ve = gen_reg_rtx (V4SImode);
3674 rtx vo = gen_reg_rtx (V4SImode);
3676 if (BYTES_BIG_ENDIAN)
3678 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3679 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3680 emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
3684 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3685 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3686 emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
3691 (define_expand "vec_widen_umult_lo_v8hi"
3692 [(set (match_operand:V4SI 0 "register_operand" "=v")
3693 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3694 (match_operand:V8HI 2 "register_operand" "v")]
3699 rtx ve = gen_reg_rtx (V4SImode);
3700 rtx vo = gen_reg_rtx (V4SImode);
3702 if (BYTES_BIG_ENDIAN)
3704 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3705 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3706 emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
3710 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3711 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3712 emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
3717 (define_expand "vec_widen_smult_hi_v8hi"
3718 [(set (match_operand:V4SI 0 "register_operand" "=v")
3719 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3720 (match_operand:V8HI 2 "register_operand" "v")]
3725 rtx ve = gen_reg_rtx (V4SImode);
3726 rtx vo = gen_reg_rtx (V4SImode);
3728 if (BYTES_BIG_ENDIAN)
3730 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3731 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3732 emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
3736 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3737 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3738 emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
3743 (define_expand "vec_widen_smult_lo_v8hi"
3744 [(set (match_operand:V4SI 0 "register_operand" "=v")
3745 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3746 (match_operand:V8HI 2 "register_operand" "v")]
3751 rtx ve = gen_reg_rtx (V4SImode);
3752 rtx vo = gen_reg_rtx (V4SImode);
3754 if (BYTES_BIG_ENDIAN)
3756 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3757 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3758 emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
3762 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3763 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3764 emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
;; Truncating pack: narrow two VP vectors into one <VP_small> vector,
;; keeping the low (modulo) bits of each element.
3769 (define_expand "vec_pack_trunc_<mode>"
3770 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
3771 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
3772 (match_operand:VP 2 "register_operand" "v")]
3773 UNSPEC_VPACK_UNS_UNS_MOD))]
;; V16QI multiply: compute even/odd 16-bit products with vmulesb/vmulosb,
;; then vperm the low byte of each product back into a single V16QI.
;; The mask indices are endian-dependent (built in the loop below).
3777 (define_expand "mulv16qi3"
3778 [(set (match_operand:V16QI 0 "register_operand" "=v")
3779 (mult:V16QI (match_operand:V16QI 1 "register_operand" "v")
3780 (match_operand:V16QI 2 "register_operand" "v")))]
3784 rtx even = gen_reg_rtx (V8HImode);
3785 rtx odd = gen_reg_rtx (V8HImode);
3786 rtx mask = gen_reg_rtx (V16QImode);
3787 rtvec v = rtvec_alloc (16);
3790 for (i = 0; i < 8; ++i) {
3791 RTVEC_ELT (v, 2 * i)
3792 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 1 : 31 - 2 * i);
3793 RTVEC_ELT (v, 2 * i + 1)
3794 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 17 : 15 - 2 * i);
3797 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3798 emit_insn (gen_altivec_vmulesb (even, operands[1], operands[2]));
3799 emit_insn (gen_altivec_vmulosb (odd, operands[1], operands[2]));
3800 emit_insn (gen_altivec_vperm_v8hiv16qi (operands[0], even, odd, mask));
;; V4SF negate without an FP constant pool entry: splat -1, shift each
;; word left by its own value (31) to get 0x80000000 lanes, i.e. a vector
;; of -0.0, then XOR with operand 1 to flip the sign bits.
3804 (define_expand "altivec_negv4sf2"
3805 [(use (match_operand:V4SF 0 "register_operand" ""))
3806 (use (match_operand:V4SF 1 "register_operand" ""))]
3812 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
3813 neg0 = gen_reg_rtx (V4SImode);
3814 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
3815 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
3818 emit_insn (gen_xorv4sf3 (operands[0],
3819 gen_lowpart (V4SFmode, neg0), operands[1]));
3824 ;; Vector reverse elements
;; Builds a byte-permute mask that maps element j to element
;; (num_elements - 1 - j), preserving byte order within each element,
;; then applies it with vperm (both vperm sources are operand 1).
3825 (define_expand "altivec_vreve<mode>2"
3826 [(set (match_operand:VEC_A 0 "register_operand" "=v")
3827 (unspec:VEC_A [(match_operand:VEC_A 1 "register_operand" "v")]
3831 int i, j, size, num_elements;
3832 rtvec v = rtvec_alloc (16);
3833 rtx mask = gen_reg_rtx (V16QImode);
3835 size = GET_MODE_UNIT_SIZE (<MODE>mode);
3836 num_elements = GET_MODE_NUNITS (<MODE>mode);
3838 for (j = 0; j < num_elements; j++)
3839 for (i = 0; i < size; i++)
3840 RTVEC_ELT (v, i + j * size)
3841 = GEN_INT (i + (num_elements - 1 - j) * size);
3843 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3844 emit_insn (gen_altivec_vperm_<mode> (operands[0], operands[1],
3845 operands[1], mask));
3849 ;; Vector SIMD PEM v2.06c defines LVLX, LVLXL, LVRX, LVRXL,
3850 ;; STVLX, STVLXL, STVRX, STVRXL are available only on Cell.
;; Cell-only unaligned load-left/load-right insns; all are gated on
;; rs6000_cpu == PROCESSOR_CELL.  The memory operand is BLKmode since
;; these read a partial, alignment-dependent span of bytes.
3851 (define_insn "altivec_lvlx"
3852 [(set (match_operand:V16QI 0 "register_operand" "=v")
3853 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3855 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3857 [(set_attr "type" "vecload")])
3859 (define_insn "altivec_lvlxl"
3860 [(set (match_operand:V16QI 0 "register_operand" "=v")
3861 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3863 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3865 [(set_attr "type" "vecload")])
3867 (define_insn "altivec_lvrx"
3868 [(set (match_operand:V16QI 0 "register_operand" "=v")
3869 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3871 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3873 [(set_attr "type" "vecload")])
3875 (define_insn "altivec_lvrxl"
3876 [(set (match_operand:V16QI 0 "register_operand" "=v")
3877 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3879 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3881 [(set_attr "type" "vecload")])
;; Matching store-left/store-right insns; the bare unspec marks the
;; partial store so it is not treated as a plain full-vector store.
3883 (define_insn "altivec_stvlx"
3885 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3886 (match_operand:V16QI 1 "register_operand" "v"))
3887 (unspec [(const_int 0)] UNSPEC_STVLX)])]
3888 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3890 [(set_attr "type" "vecstore")])
3892 (define_insn "altivec_stvlxl"
3894 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3895 (match_operand:V16QI 1 "register_operand" "v"))
3896 (unspec [(const_int 0)] UNSPEC_STVLXL)])]
3897 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3899 [(set_attr "type" "vecstore")])
3901 (define_insn "altivec_stvrx"
3903 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3904 (match_operand:V16QI 1 "register_operand" "v"))
3905 (unspec [(const_int 0)] UNSPEC_STVRX)])]
3906 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3908 [(set_attr "type" "vecstore")])
3910 (define_insn "altivec_stvrxl"
3912 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3913 (match_operand:V16QI 1 "register_operand" "v"))
3914 (unspec [(const_int 0)] UNSPEC_STVRXL)])]
3915 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3917 [(set_attr "type" "vecstore")])
;; Unpack V8HI to V4SI (signed or unsigned, high or low half) and then
;; convert to V4SF with vcfsx/vcfux (scale factor 0).
3919 (define_expand "vec_unpacks_float_hi_v8hi"
3920 [(set (match_operand:V4SF 0 "register_operand" "")
3921 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3922 UNSPEC_VUPKHS_V4SF))]
3926 rtx tmp = gen_reg_rtx (V4SImode);
3928 emit_insn (gen_vec_unpacks_hi_v8hi (tmp, operands[1]));
3929 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
3933 (define_expand "vec_unpacks_float_lo_v8hi"
3934 [(set (match_operand:V4SF 0 "register_operand" "")
3935 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3936 UNSPEC_VUPKLS_V4SF))]
3940 rtx tmp = gen_reg_rtx (V4SImode);
3942 emit_insn (gen_vec_unpacks_lo_v8hi (tmp, operands[1]));
3943 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
3947 (define_expand "vec_unpacku_float_hi_v8hi"
3948 [(set (match_operand:V4SF 0 "register_operand" "")
3949 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3950 UNSPEC_VUPKHU_V4SF))]
3954 rtx tmp = gen_reg_rtx (V4SImode);
3956 emit_insn (gen_vec_unpacku_hi_v8hi (tmp, operands[1]));
3957 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
3961 (define_expand "vec_unpacku_float_lo_v8hi"
3962 [(set (match_operand:V4SF 0 "register_operand" "")
3963 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3964 UNSPEC_VUPKLU_V4SF))]
3968 rtx tmp = gen_reg_rtx (V4SImode);
3970 emit_insn (gen_vec_unpacku_lo_v8hi (tmp, operands[1]));
3971 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
3976 ;; Power8/power9 vector instructions encoded as Altivec instructions
3978 ;; Vector count leading zeros
;; Power8/Power9 unary vector insns using standard RTL codes (clz, ctz,
;; popcount, parity) plus the vabsdu and vgbbd unspecs.
3979 (define_insn "*p8v_clz<mode>2"
3980 [(set (match_operand:VI2 0 "register_operand" "=v")
3981 (clz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
3984 [(set_attr "length" "4")
3985 (set_attr "type" "vecsimple")])
3987 ;; Vector absolute difference unsigned
3988 (define_expand "vadu<mode>3"
3989 [(set (match_operand:VI 0 "register_operand")
3990 (unspec:VI [(match_operand:VI 1 "register_operand")
3991 (match_operand:VI 2 "register_operand")]
3995 ;; Vector absolute difference unsigned
3996 (define_insn "*p9_vadu<mode>3"
3997 [(set (match_operand:VI 0 "register_operand" "=v")
3998 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
3999 (match_operand:VI 2 "register_operand" "v")]
4002 "vabsdu<wd> %0,%1,%2"
4003 [(set_attr "type" "vecsimple")])
4005 ;; Vector count trailing zeros
4006 (define_insn "*p9v_ctz<mode>2"
4007 [(set (match_operand:VI2 0 "register_operand" "=v")
4008 (ctz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4011 [(set_attr "length" "4")
4012 (set_attr "type" "vecsimple")])
4014 ;; Vector population count
4015 (define_insn "*p8v_popcount<mode>2"
4016 [(set (match_operand:VI2 0 "register_operand" "=v")
4017 (popcount:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4020 [(set_attr "length" "4")
4021 (set_attr "type" "vecsimple")])
4024 (define_insn "*p9v_parity<mode>2"
4025 [(set (match_operand:VParity 0 "register_operand" "=v")
4026 (parity:VParity (match_operand:VParity 1 "register_operand" "v")))]
4029 [(set_attr "length" "4")
4030 (set_attr "type" "vecsimple")])
4032 ;; Vector Gather Bits by Bytes by Doubleword
4033 (define_insn "p8v_vgbbd"
4034 [(set (match_operand:V16QI 0 "register_operand" "=v")
4035 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
4039 [(set_attr "length" "4")
4040 (set_attr "type" "vecsimple")])
4043 ;; 128-bit binary integer arithmetic
4044 ;; We have a special container type (V1TImode) to allow operations using the
4045 ;; ISA 2.07 128-bit binary support to target the VMX/altivec registers without
4046 ;; having to worry about the register allocator deciding GPRs are better.
;; ISA 2.07 quadword (V1TI) integer add/subtract, plus the carry-out
;; (vaddcuq/vsubcuq) and extended carry-in (vadde*/vsube*) forms, and the
;; vector bit-permute insns.  Plain add/sub use genuine plus/minus RTL;
;; the carry forms are unspecs.
4048 (define_insn "altivec_vadduqm"
4049 [(set (match_operand:V1TI 0 "register_operand" "=v")
4050 (plus:V1TI (match_operand:V1TI 1 "register_operand" "v")
4051 (match_operand:V1TI 2 "register_operand" "v")))]
4054 [(set_attr "length" "4")
4055 (set_attr "type" "vecsimple")])
4057 (define_insn "altivec_vaddcuq"
4058 [(set (match_operand:V1TI 0 "register_operand" "=v")
4059 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4060 (match_operand:V1TI 2 "register_operand" "v")]
4064 [(set_attr "length" "4")
4065 (set_attr "type" "vecsimple")])
4067 (define_insn "altivec_vsubuqm"
4068 [(set (match_operand:V1TI 0 "register_operand" "=v")
4069 (minus:V1TI (match_operand:V1TI 1 "register_operand" "v")
4070 (match_operand:V1TI 2 "register_operand" "v")))]
4073 [(set_attr "length" "4")
4074 (set_attr "type" "vecsimple")])
4076 (define_insn "altivec_vsubcuq"
4077 [(set (match_operand:V1TI 0 "register_operand" "=v")
4078 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4079 (match_operand:V1TI 2 "register_operand" "v")]
4083 [(set_attr "length" "4")
4084 (set_attr "type" "vecsimple")])
;; Extended forms: operand 3 carries the incoming carry vector.
4086 (define_insn "altivec_vaddeuqm"
4087 [(set (match_operand:V1TI 0 "register_operand" "=v")
4088 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4089 (match_operand:V1TI 2 "register_operand" "v")
4090 (match_operand:V1TI 3 "register_operand" "v")]
4093 "vaddeuqm %0,%1,%2,%3"
4094 [(set_attr "length" "4")
4095 (set_attr "type" "vecsimple")])
4097 (define_insn "altivec_vaddecuq"
4098 [(set (match_operand:V1TI 0 "register_operand" "=v")
4099 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4100 (match_operand:V1TI 2 "register_operand" "v")
4101 (match_operand:V1TI 3 "register_operand" "v")]
4104 "vaddecuq %0,%1,%2,%3"
4105 [(set_attr "length" "4")
4106 (set_attr "type" "vecsimple")])
4108 (define_insn "altivec_vsubeuqm"
4109 [(set (match_operand:V1TI 0 "register_operand" "=v")
4110 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4111 (match_operand:V1TI 2 "register_operand" "v")
4112 (match_operand:V1TI 3 "register_operand" "v")]
4115 "vsubeuqm %0,%1,%2,%3"
4116 [(set_attr "length" "4")
4117 (set_attr "type" "vecsimple")])
4119 (define_insn "altivec_vsubecuq"
4120 [(set (match_operand:V1TI 0 "register_operand" "=v")
4121 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4122 (match_operand:V1TI 2 "register_operand" "v")
4123 (match_operand:V1TI 3 "register_operand" "v")]
4126 "vsubecuq %0,%1,%2,%3"
4127 [(set_attr "length" "4")
4128 (set_attr "type" "vecsimple")])
4130 ;; We use V2DI as the output type to simplify converting the permute
4131 ;; bits into an integer
4132 (define_insn "altivec_vbpermq"
4133 [(set (match_operand:V2DI 0 "register_operand" "=v")
4134 (unspec:V2DI [(match_operand:V16QI 1 "register_operand" "v")
4135 (match_operand:V16QI 2 "register_operand" "v")]
4139 [(set_attr "type" "vecperm")])
4141 ; One of the vector API interfaces requires returning vector unsigned char.
4142 (define_insn "altivec_vbpermq2"
4143 [(set (match_operand:V16QI 0 "register_operand" "=v")
4144 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
4145 (match_operand:V16QI 2 "register_operand" "v")]
4149 [(set_attr "type" "vecperm")])
4151 (define_insn "altivec_vbpermd"
4152 [(set (match_operand:V2DI 0 "register_operand" "=v")
4153 (unspec:V2DI [(match_operand:V2DI 1 "register_operand" "v")
4154 (match_operand:V16QI 2 "register_operand" "v")]
4158 [(set_attr "type" "vecsimple")])
4160 ;; Decimal Integer operations
;; BCD (binary-coded decimal) add/subtract.  One int iterator covers both
;; operations; operand 3 (0 or 1) is the instruction's PS field.
4161 (define_int_iterator UNSPEC_BCD_ADD_SUB [UNSPEC_BCDADD UNSPEC_BCDSUB])
4163 (define_int_attr bcd_add_sub [(UNSPEC_BCDADD "add")
4164 (UNSPEC_BCDSUB "sub")])
4166 (define_code_iterator BCD_TEST [eq lt gt unordered])
;; Plain bcdadd./bcdsub.; CR6 is clobbered (the dot-form records status).
4168 (define_insn "bcd<bcd_add_sub>"
4169 [(set (match_operand:V1TI 0 "gpc_reg_operand" "=v")
4170 (unspec:V1TI [(match_operand:V1TI 1 "gpc_reg_operand" "v")
4171 (match_operand:V1TI 2 "gpc_reg_operand" "v")
4172 (match_operand:QI 3 "const_0_to_1_operand" "n")]
4173 UNSPEC_BCD_ADD_SUB))
4174 (clobber (reg:CCFP CR6_REGNO))]
4176 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4177 [(set_attr "length" "4")
4178 (set_attr "type" "vecsimple")])
4180 ;; Use a floating point type (V2DFmode) for the compare to set CR6 so that we
4181 ;; can use the unordered test for BCD nans and add/subtracts that overflow. An
4182 ;; UNORDERED test on an integer type (like V1TImode) is not defined. The type
4183 ;; probably should be one that can go in the VMX (Altivec) registers, so we
4184 ;; can't use DDmode or DFmode.
;; Variant where only the CR6 result is used; the V1TI result is a
;; scratch.
4185 (define_insn "*bcd<bcd_add_sub>_test"
4186 [(set (reg:CCFP CR6_REGNO)
4188 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "v")
4189 (match_operand:V1TI 2 "register_operand" "v")
4190 (match_operand:QI 3 "const_0_to_1_operand" "i")]
4192 (match_operand:V2DF 4 "zero_constant" "j")))
4193 (clobber (match_scratch:V1TI 0 "=v"))]
4195 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4196 [(set_attr "length" "4")
4197 (set_attr "type" "vecsimple")])
;; Variant where both the V1TI result and the CR6 status are used.
4199 (define_insn "*bcd<bcd_add_sub>_test2"
4200 [(set (match_operand:V1TI 0 "register_operand" "=v")
4201 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4202 (match_operand:V1TI 2 "register_operand" "v")
4203 (match_operand:QI 3 "const_0_to_1_operand" "i")]
4204 UNSPEC_BCD_ADD_SUB))
4205 (set (reg:CCFP CR6_REGNO)
4207 (unspec:V2DF [(match_dup 1)
4211 (match_operand:V2DF 4 "zero_constant" "j")))]
4213 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4214 [(set_attr "length" "4")
4215 (set_attr "type" "vecsimple")])
;; ISA 3.0 Deliver A Random Number: 32-bit, raw 64-bit, and conditioned
;; 64-bit forms.  NOTE(review): the define_insn header line for the third
;; ("darn") pattern is absent from this extract; its body is kept as-is.
4217 (define_insn "darn_32"
4218 [(set (match_operand:SI 0 "register_operand" "=r")
4219 (unspec:SI [(const_int 0)] UNSPEC_DARN_32))]
4222 [(set_attr "type" "integer")])
4224 (define_insn "darn_raw"
4225 [(set (match_operand:DI 0 "register_operand" "=r")
4226 (unspec:DI [(const_int 0)] UNSPEC_DARN_RAW))]
4227 "TARGET_P9_MISC && TARGET_64BIT"
4229 [(set_attr "type" "integer")])
4232 [(set (match_operand:DI 0 "register_operand" "=r")
4233 (unspec:DI [(const_int 0)] UNSPEC_DARN))]
4234 "TARGET_P9_MISC && TARGET_64BIT"
4236 [(set_attr "type" "integer")])
4238 ;; Test byte within range.
4240 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
4241 ;; represents a byte whose value is ignored in this context and
4242 ;; vv, the least significant byte, holds the byte value that is to
4243 ;; be tested for membership within the range specified by operand 2.
4244 ;; The bytes of operand 2 are organized as xx:xx:hi:lo.
4246 ;; Return in target register operand 0 a value of 1 if lo <= vv and
4247 ;; vv <= hi. Otherwise, set register operand 0 to 0.
4249 ;; Though the instructions to which this expansion maps operate on
4250 ;; 64-bit registers, the current implementation only operates on
4251 ;; SI-mode operands as the high-order bits provide no information
4252 ;; that is not already available in the low-order bits. To avoid the
4253 ;; costs of data widening operations, future enhancements might allow
4254 ;; DI mode for operand 0 and/or might allow operand 1 to be QI mode.
;; Expand cmprb (single range): compare-byte into a fresh CC register
;; (operand 3, created below), then map the CR bits to an SI result via
;; the if_then_else chain (see setb_signed for the bit semantics).
4255 (define_expand "cmprb"
4257 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
4258 (match_operand:SI 2 "gpc_reg_operand" "r")]
4260 (set (match_operand:SI 0 "gpc_reg_operand" "=r")
4261 (if_then_else:SI (lt (match_dup 3)
4264 (if_then_else (gt (match_dup 3)
4270 operands[3] = gen_reg_rtx (CCmode);
4273 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
4274 ;; represents a byte whose value is ignored in this context and
4275 ;; vv, the least significant byte, holds the byte value that is to
4276 ;; be tested for membership within the range specified by operand 2.
4277 ;; The bytes of operand 2 are organized as xx:xx:hi:lo.
4279 ;; Set bit 1 (the GT bit, 0x4) of CR register operand 0 to 1 if
4280 ;; lo <= vv and vv <= hi. Otherwise, set the GT bit to 0. The other
4281 ;; 3 bits of the target CR register are all set to 0.
;; The actual compare-ranged-byte insn: sets the CR field from the byte
;; in operand 1 tested against the hi:lo range in operand 2.
4282 (define_insn "*cmprb_internal"
4283 [(set (match_operand:CC 0 "cc_reg_operand" "=y")
4284 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
4285 (match_operand:SI 2 "gpc_reg_operand" "r")]
4289 [(set_attr "type" "logical")])
4291 ;; Set operand 0 register to -1 if the LT bit (0x8) of condition
4292 ;; register operand 1 is on. Otherwise, set operand 0 register to 1
4293 ;; if the GT bit (0x4) of condition register operand 1 is on.
4294 ;; Otherwise, set operand 0 to 0. Note that the result stored into
4295 ;; register operand 0 is non-zero iff either the LT or GT bits are on
4296 ;; within condition register operand 1.
;; setb: materialize -1/1/0 in a GPR from the LT/GT bits of a CR field
;; (signed CCmode variant, then the unsigned CCUNSmode variant).
4297 (define_insn "setb_signed"
4298 [(set (match_operand:SI 0 "gpc_reg_operand" "=r")
4299 (if_then_else:SI (lt (match_operand:CC 1 "cc_reg_operand" "y")
4302 (if_then_else (gt (match_dup 1)
4308 [(set_attr "type" "logical")])
4310 (define_insn "setb_unsigned"
4311 [(set (match_operand:SI 0 "gpc_reg_operand" "=r")
4312 (if_then_else:SI (ltu (match_operand:CCUNS 1 "cc_reg_operand" "y")
4315 (if_then_else (gtu (match_dup 1)
4321 [(set_attr "type" "logical")])
4323 ;; Test byte within two ranges.
4325 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
4326 ;; represents a byte whose value is ignored in this context and
4327 ;; vv, the least significant byte, holds the byte value that is to
4328 ;; be tested for membership within the range specified by operand 2.
4329 ;; The bytes of operand 2 are organized as hi_1:lo_1:hi_2:lo_2.
4331 ;; Return in target register operand 0 a value of 1 if (lo_1 <= vv and
4332 ;; vv <= hi_1) or if (lo_2 <= vv and vv <= hi_2). Otherwise, set register
4335 ;; Though the instructions to which this expansion maps operate on
4336 ;; 64-bit registers, the current implementation only operates on
4337 ;; SI-mode operands as the high-order bits provide no information
4338 ;; that is not already available in the low-order bits. To avoid the
4339 ;; costs of data widening operations, future enhancements might allow
4340 ;; DI mode for operand 0 and/or might allow operand 1 to be QI mode.
;;
;; Expansion: first set a scratch CC register (operand 3, allocated in
;; the preparation statements below) via the range-compare unspec, then
;; convert that CC result into a 0/1 value in GPR operand 0 through the
;; nested if_then_else selection.
4341 (define_expand "cmprb2"
4343 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
4344 (match_operand:SI 2 "gpc_reg_operand" "r")]
4346 (set (match_operand:SI 0 "gpc_reg_operand" "=r")
4347 (if_then_else:SI (lt (match_dup 3)
4350 (if_then_else (gt (match_dup 3)
;; operand 3 is an internal scratch: a fresh CC-mode pseudo register.
4356 operands[3] = gen_reg_rtx (CCmode);
4359 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
4360 ;; represents a byte whose value is ignored in this context and
4361 ;; vv, the least significant byte, holds the byte value that is to
4362 ;; be tested for membership within the ranges specified by operand 2.
4363 ;; The bytes of operand 2 are organized as hi_1:lo_1:hi_2:lo_2.
4365 ;; Set bit 1 (the GT bit, 0x4) of CR register operand 0 to 1 if
4366 ;; (lo_1 <= vv and vv <= hi_1) or if (lo_2 <= vv and vv <= hi_2).
4367 ;; Otherwise, set the GT bit to 0. The other 3 bits of the target
4368 ;; CR register are all set to 0.
;;
;; Two-range variant of *cmprb_internal; matched through the cmprb2
;; expander above, which supplies the CC destination register.
4369 (define_insn "*cmprb2_internal"
4370 [(set (match_operand:CC 0 "cc_reg_operand" "=y")
4371 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
4372 (match_operand:SI 2 "gpc_reg_operand" "r")]
4376 [(set_attr "type" "logical")])
4378 ;; Test byte membership within set of 8 bytes.
4380 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
4381 ;; represents a byte whose value is ignored in this context and
4382 ;; vv, the least significant byte, holds the byte value that is to
4383 ;; be tested for membership within the set specified by operand 2.
4384 ;; The bytes of operand 2 are organized as e0:e1:e2:e3:e4:e5:e6:e7.
4386 ;; Return in target register operand 0 a value of 1 if vv equals one
4387 ;; of the values e0, e1, e2, e3, e4, e5, e6, or e7. Otherwise, set
4388 ;; register operand 0 to 0. Note that the 8 byte values held within
4389 ;; operand 2 need not be unique.
4391 ;; Though the instructions to which this expansion maps operate on
4392 ;; 64-bit registers, the current implementation requires that operands
4393 ;; 0 and 1 have mode SI as the high-order bits provide no information
4394 ;; that is not already available in the low-order bits. To avoid the
4395 ;; costs of data widening operations, future enhancements might allow
4396 ;; DI mode for operand 0 and/or might allow operand 1 to be QI mode.
;;
;; Same shape as cmprb2: set a scratch CC register (operand 3) from the
;; byte-set-membership unspec, then materialize 0/1 into GPR operand 0.
;; Guarded by TARGET_64BIT because operand 2 is a DImode register
;; holding all eight candidate byte values.
4397 (define_expand "cmpeqb"
4399 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
4400 (match_operand:DI 2 "gpc_reg_operand" "r")]
4402 (set (match_operand:SI 0 "gpc_reg_operand" "=r")
4403 (if_then_else:SI (lt (match_dup 3)
4406 (if_then_else (gt (match_dup 3)
4410 "TARGET_P9_MISC && TARGET_64BIT"
;; operand 3 is an internal scratch: a fresh CC-mode pseudo register.
4412 operands[3] = gen_reg_rtx (CCmode);
4415 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
4416 ;; represents a byte whose value is ignored in this context and
4417 ;; vv, the least significant byte, holds the byte value that is to
4418 ;; be tested for membership within the set specified by operand 2.
4419 ;; The bytes of operand 2 are organized as e0:e1:e2:e3:e4:e5:e6:e7.
4421 ;; Set bit 1 (the GT bit, 0x4) of CR register operand 0 to 1 if vv
4422 ;; equals one of the values e0, e1, e2, e3, e4, e5, e6, or e7. Otherwise,
4423 ;; set the GT bit to zero. The other 3 bits of the target CR register
4424 ;; are all set to 0.
;;
;; Matched through the cmpeqb expander above; same TARGET_P9_MISC &&
;; TARGET_64BIT guard, since operand 2 is a DImode set of eight bytes.
4425 (define_insn "*cmpeqb_internal"
4426 [(set (match_operand:CC 0 "cc_reg_operand" "=y")
4427 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
4428 (match_operand:DI 2 "gpc_reg_operand" "r")]
4430 "TARGET_P9_MISC && TARGET_64BIT"
4432 [(set_attr "type" "logical")])
;; BCD add/subtract with condition test, parameterized by the
;; <bcd_add_sub> and <code> iterators. A parallel performs the BCD
;; operation (operand 3 selects the 0/1 variant flag) while setting
;; CR6 (as CCFP) and clobbering a V1TI scratch (operand 5); the CR6
;; result is then reduced to an SImode value in operand 0 via the
;; BCD_TEST comparison code. operand 4 is initialized to a V2DF zero
;; constant in the preparation statements.
4434 (define_expand "bcd<bcd_add_sub>_<code>"
4435 [(parallel [(set (reg:CCFP CR6_REGNO)
4437 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "")
4438 (match_operand:V1TI 2 "register_operand" "")
4439 (match_operand:QI 3 "const_0_to_1_operand" "")]
4442 (clobber (match_scratch:V1TI 5 ""))])
4443 (set (match_operand:SI 0 "register_operand" "")
4444 (BCD_TEST:SI (reg:CCFP CR6_REGNO)
;; Comparison against zero: operand 4 is the all-zeros V2DF constant.
4448 operands[4] = CONST0_RTX (V2DFmode);
4451 ;; Peephole2 pattern to combine a bcdadd/bcdsub that calculates the value and
4452 ;; the bcdadd/bcdsub that tests the value. The combiner won't work since
4453 ;; CR6 is a hard coded register. Unfortunately, all of the Altivec predicate
4454 ;; support is hard coded to use the fixed register CR6 instead of creating
4455 ;; a register class for CR6.
4458 [(parallel [(set (match_operand:V1TI 0 "register_operand" "")
4459 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "")
4460 (match_operand:V1TI 2 "register_operand" "")
4461 (match_operand:QI 3 "const_0_to_1_operand" "")]
4462 UNSPEC_BCD_ADD_SUB))
4463 (clobber (reg:CCFP CR6_REGNO))])
4464 (parallel [(set (reg:CCFP CR6_REGNO)
4466 (unspec:V2DF [(match_dup 1)
4470 (match_operand:V2DF 4 "zero_constant" "")))
4471 (clobber (match_operand:V1TI 5 "register_operand" ""))])]
4473 [(parallel [(set (match_dup 0)
4474 (unspec:V1TI [(match_dup 1)
4477 UNSPEC_BCD_ADD_SUB))
4478 (set (reg:CCFP CR6_REGNO)
4480 (unspec:V2DF [(match_dup 1)