2 ;; Copyright (C) 2002-2015 Free Software Foundation, Inc.
3 ;; Contributed by Aldy Hernandez (aldy@quesejoda.com)
5 ;; This file is part of GCC.
7 ;; GCC is free software; you can redistribute it and/or modify it
8 ;; under the terms of the GNU General Public License as published
9 ;; by the Free Software Foundation; either version 3, or (at your
10 ;; option) any later version.
12 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
13 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 ;; License for more details.
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
21 (define_c_enum "unspec"
44 UNSPEC_VPACK_SIGN_SIGN_SAT
45 UNSPEC_VPACK_SIGN_UNS_SAT
46 UNSPEC_VPACK_UNS_UNS_SAT
47 UNSPEC_VPACK_UNS_UNS_MOD
48 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT
70 UNSPEC_VUNPACK_HI_SIGN
71 UNSPEC_VUNPACK_LO_SIGN
72 UNSPEC_VUNPACK_HI_SIGN_DIRECT
73 UNSPEC_VUNPACK_LO_SIGN_DIRECT
137 UNSPEC_VSUMSWS_DIRECT
150 (define_c_enum "unspecv"
159 (define_mode_iterator VI [V4SI V8HI V16QI])
160 ;; Like VI, but add ISA 2.07 integer vector ops
161 (define_mode_iterator VI2 [V4SI V8HI V16QI V2DI])
162 ;; Short vec in modes
163 (define_mode_iterator VIshort [V8HI V16QI])
165 (define_mode_iterator VF [V4SF])
166 ;; Vec modes, pity mode iterators are not composable
167 (define_mode_iterator V [V4SI V8HI V16QI V4SF])
168 ;; Vec modes for move/logical/permute ops, include vector types for move not
169 ;; otherwise handled by altivec (v2df, v2di, ti)
170 (define_mode_iterator VM [V4SI
178 (KF "FLOAT128_VECTOR_P (KFmode)")
179 (TF "FLOAT128_VECTOR_P (TFmode)")])
181 ;; Like VM, except don't do TImode
182 (define_mode_iterator VM2 [V4SI
189 (KF "FLOAT128_VECTOR_P (KFmode)")
190 (TF "FLOAT128_VECTOR_P (TFmode)")])
192 (define_mode_attr VI_char [(V2DI "d") (V4SI "w") (V8HI "h") (V16QI "b")])
193 (define_mode_attr VI_scalar [(V2DI "DI") (V4SI "SI") (V8HI "HI") (V16QI "QI")])
194 (define_mode_attr VI_unit [(V16QI "VECTOR_UNIT_ALTIVEC_P (V16QImode)")
195 (V8HI "VECTOR_UNIT_ALTIVEC_P (V8HImode)")
196 (V4SI "VECTOR_UNIT_ALTIVEC_P (V4SImode)")
197 (V2DI "VECTOR_UNIT_P8_VECTOR_P (V2DImode)")
198 (V1TI "VECTOR_UNIT_ALTIVEC_P (V1TImode)")])
200 ;; Vector pack/unpack
201 (define_mode_iterator VP [V2DI V4SI V8HI])
202 (define_mode_attr VP_small [(V2DI "V4SI") (V4SI "V8HI") (V8HI "V16QI")])
203 (define_mode_attr VP_small_lc [(V2DI "v4si") (V4SI "v8hi") (V8HI "v16qi")])
204 (define_mode_attr VU_char [(V2DI "w") (V4SI "h") (V8HI "b")])
206 ;; Vector move instructions.
207 (define_insn "*altivec_mov<mode>"
208 [(set (match_operand:VM2 0 "nonimmediate_operand" "=Z,v,v,*Y,*r,*r,v,v,*r")
209 (match_operand:VM2 1 "input_operand" "v,Z,v,r,Y,r,j,W,W"))]
210 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)
211 && (register_operand (operands[0], <MODE>mode)
212 || register_operand (operands[1], <MODE>mode))"
214 switch (which_alternative)
216 case 0: return "stvx %1,%y0";
217 case 1: return "lvx %0,%y1";
218 case 2: return "vor %0,%1,%1";
222 case 6: return "vxor %0,%0,%0";
223 case 7: return output_vec_const_move (operands);
225 default: gcc_unreachable ();
228 [(set_attr "type" "vecstore,vecload,vecsimple,store,load,*,vecsimple,*,*")
229 (set_attr "length" "4,4,4,20,20,20,4,8,32")])
231 ;; Unlike other altivec moves, allow the GPRs, since a normal use of TImode
232 ;; is for unions. However for plain data movement, slightly favor the vector
234 (define_insn "*altivec_movti"
235 [(set (match_operand:TI 0 "nonimmediate_operand" "=Z,v,v,?Y,?r,?r,v,v")
236 (match_operand:TI 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
237 "VECTOR_MEM_ALTIVEC_P (TImode)
238 && (register_operand (operands[0], TImode)
239 || register_operand (operands[1], TImode))"
241 switch (which_alternative)
243 case 0: return "stvx %1,%y0";
244 case 1: return "lvx %0,%y1";
245 case 2: return "vor %0,%1,%1";
249 case 6: return "vxor %0,%0,%0";
250 case 7: return output_vec_const_move (operands);
251 default: gcc_unreachable ();
254 [(set_attr "type" "vecstore,vecload,vecsimple,store,load,*,vecsimple,*")])
256 ;; Load up a vector with the most significant bit set by loading up -1 and
257 ;; doing a shift left
259 [(set (match_operand:VM 0 "altivec_register_operand" "")
260 (match_operand:VM 1 "easy_vector_constant_msb" ""))]
261 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
264 rtx dest = operands[0];
265 machine_mode mode = GET_MODE (operands[0]);
269 if (mode == V4SFmode)
272 dest = gen_lowpart (V4SImode, dest);
275 num_elements = GET_MODE_NUNITS (mode);
276 v = rtvec_alloc (num_elements);
277 for (i = 0; i < num_elements; i++)
278 RTVEC_ELT (v, i) = constm1_rtx;
280 emit_insn (gen_vec_initv4si (dest, gen_rtx_PARALLEL (mode, v)));
281 emit_insn (gen_rtx_SET (dest, gen_rtx_ASHIFT (mode, dest, dest)));
286 [(set (match_operand:VM 0 "altivec_register_operand" "")
287 (match_operand:VM 1 "easy_vector_constant_add_self" ""))]
288 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
289 [(set (match_dup 0) (match_dup 3))
290 (set (match_dup 0) (match_dup 4))]
292 rtx dup = gen_easy_altivec_constant (operands[1]);
294 machine_mode op_mode = <MODE>mode;
296 /* Divide the operand of the resulting VEC_DUPLICATE, and use
297 simplify_rtx to make a CONST_VECTOR. */
298 XEXP (dup, 0) = simplify_const_binary_operation (ASHIFTRT, QImode,
299 XEXP (dup, 0), const1_rtx);
300 const_vec = simplify_rtx (dup);
302 if (op_mode == V4SFmode)
305 operands[0] = gen_lowpart (op_mode, operands[0]);
307 if (GET_MODE (const_vec) == op_mode)
308 operands[3] = const_vec;
310 operands[3] = gen_lowpart (op_mode, const_vec);
311 operands[4] = gen_rtx_PLUS (op_mode, operands[0], operands[0]);
315 [(set (match_operand:VM 0 "altivec_register_operand" "")
316 (match_operand:VM 1 "easy_vector_constant_vsldoi" ""))]
317 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && can_create_pseudo_p ()"
318 [(set (match_dup 2) (match_dup 3))
319 (set (match_dup 4) (match_dup 5))
321 (unspec:VM [(match_dup 2)
326 rtx op1 = operands[1];
327 int elt = (BYTES_BIG_ENDIAN) ? 0 : GET_MODE_NUNITS (<MODE>mode) - 1;
328 HOST_WIDE_INT val = const_vector_elt_as_int (op1, elt);
329 rtx rtx_val = GEN_INT (val);
330 int shift = vspltis_shifted (op1);
331 int nunits = GET_MODE_NUNITS (<MODE>mode);
334 gcc_assert (shift != 0);
335 operands[2] = gen_reg_rtx (<MODE>mode);
336 operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, rtvec_alloc (nunits));
337 operands[4] = gen_reg_rtx (<MODE>mode);
341 operands[5] = CONSTM1_RTX (<MODE>mode);
342 operands[6] = GEN_INT (-shift);
346 operands[5] = CONST0_RTX (<MODE>mode);
347 operands[6] = GEN_INT (shift);
350 /* Populate the constant vectors. */
351 for (i = 0; i < nunits; i++)
352 XVECEXP (operands[3], 0, i) = rtx_val;
355 (define_insn "get_vrsave_internal"
356 [(set (match_operand:SI 0 "register_operand" "=r")
357 (unspec:SI [(reg:SI 109)] UNSPEC_GET_VRSAVE))]
361 return "mfspr %0,256";
363 return "mfvrsave %0";
365 [(set_attr "type" "*")])
367 (define_insn "*set_vrsave_internal"
368 [(match_parallel 0 "vrsave_operation"
370 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r")
371 (reg:SI 109)] UNSPECV_SET_VRSAVE))])]
375 return "mtspr 256,%1";
377 return "mtvrsave %1";
379 [(set_attr "type" "*")])
381 (define_insn "*save_world"
382 [(match_parallel 0 "save_world_operation"
383 [(clobber (reg:SI 65))
384 (use (match_operand:SI 1 "call_operand" "s"))])]
385 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
387 [(set_attr "type" "branch")
388 (set_attr "length" "4")])
390 (define_insn "*restore_world"
391 [(match_parallel 0 "restore_world_operation"
394 (use (match_operand:SI 1 "call_operand" "s"))
395 (clobber (match_operand:SI 2 "gpc_reg_operand" "=r"))])]
396 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
399 ;; The save_vregs and restore_vregs patterns don't use memory_operand
400 ;; because (plus (reg) (const_int)) is not a valid vector address.
401 ;; This way is more compact than describing exactly what happens in
402 ;; the out-of-line functions, ie. loading the constant into r11/r12
403 ;; then using indexed addressing, and requires less editing of rtl
404 ;; to describe the operation to dwarf2out_frame_debug_expr.
405 (define_insn "*save_vregs_<mode>_r11"
406 [(match_parallel 0 "any_parallel_operand"
407 [(clobber (reg:P 65))
408 (use (match_operand:P 1 "symbol_ref_operand" "s"))
411 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
412 (match_operand:P 3 "short_cint_operand" "I")))
413 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
416 [(set_attr "type" "branch")
417 (set_attr "length" "4")])
419 (define_insn "*save_vregs_<mode>_r12"
420 [(match_parallel 0 "any_parallel_operand"
421 [(clobber (reg:P 65))
422 (use (match_operand:P 1 "symbol_ref_operand" "s"))
425 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
426 (match_operand:P 3 "short_cint_operand" "I")))
427 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
430 [(set_attr "type" "branch")
431 (set_attr "length" "4")])
433 (define_insn "*restore_vregs_<mode>_r11"
434 [(match_parallel 0 "any_parallel_operand"
435 [(clobber (reg:P 65))
436 (use (match_operand:P 1 "symbol_ref_operand" "s"))
439 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
440 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
441 (match_operand:P 4 "short_cint_operand" "I"))))])]
444 [(set_attr "type" "branch")
445 (set_attr "length" "4")])
447 (define_insn "*restore_vregs_<mode>_r12"
448 [(match_parallel 0 "any_parallel_operand"
449 [(clobber (reg:P 65))
450 (use (match_operand:P 1 "symbol_ref_operand" "s"))
453 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
454 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
455 (match_operand:P 4 "short_cint_operand" "I"))))])]
458 [(set_attr "type" "branch")
459 (set_attr "length" "4")])
461 ;; Simple binary operations.
464 (define_insn "add<mode>3"
465 [(set (match_operand:VI2 0 "register_operand" "=v")
466 (plus:VI2 (match_operand:VI2 1 "register_operand" "v")
467 (match_operand:VI2 2 "register_operand" "v")))]
469 "vaddu<VI_char>m %0,%1,%2"
470 [(set_attr "type" "vecsimple")])
472 (define_insn "*altivec_addv4sf3"
473 [(set (match_operand:V4SF 0 "register_operand" "=v")
474 (plus:V4SF (match_operand:V4SF 1 "register_operand" "v")
475 (match_operand:V4SF 2 "register_operand" "v")))]
476 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
478 [(set_attr "type" "vecfloat")])
480 (define_insn "altivec_vaddcuw"
481 [(set (match_operand:V4SI 0 "register_operand" "=v")
482 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
483 (match_operand:V4SI 2 "register_operand" "v")]
485 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
487 [(set_attr "type" "vecsimple")])
489 (define_insn "altivec_vaddu<VI_char>s"
490 [(set (match_operand:VI 0 "register_operand" "=v")
491 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
492 (match_operand:VI 2 "register_operand" "v")]
494 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
496 "vaddu<VI_char>s %0,%1,%2"
497 [(set_attr "type" "vecsimple")])
499 (define_insn "altivec_vadds<VI_char>s"
500 [(set (match_operand:VI 0 "register_operand" "=v")
501 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
502 (match_operand:VI 2 "register_operand" "v")]
504 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
505 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
506 "vadds<VI_char>s %0,%1,%2"
507 [(set_attr "type" "vecsimple")])
510 (define_insn "sub<mode>3"
511 [(set (match_operand:VI2 0 "register_operand" "=v")
512 (minus:VI2 (match_operand:VI2 1 "register_operand" "v")
513 (match_operand:VI2 2 "register_operand" "v")))]
515 "vsubu<VI_char>m %0,%1,%2"
516 [(set_attr "type" "vecsimple")])
518 (define_insn "*altivec_subv4sf3"
519 [(set (match_operand:V4SF 0 "register_operand" "=v")
520 (minus:V4SF (match_operand:V4SF 1 "register_operand" "v")
521 (match_operand:V4SF 2 "register_operand" "v")))]
522 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
524 [(set_attr "type" "vecfloat")])
526 (define_insn "altivec_vsubcuw"
527 [(set (match_operand:V4SI 0 "register_operand" "=v")
528 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
529 (match_operand:V4SI 2 "register_operand" "v")]
531 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
533 [(set_attr "type" "vecsimple")])
535 (define_insn "altivec_vsubu<VI_char>s"
536 [(set (match_operand:VI 0 "register_operand" "=v")
537 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
538 (match_operand:VI 2 "register_operand" "v")]
540 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
541 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
542 "vsubu<VI_char>s %0,%1,%2"
543 [(set_attr "type" "vecsimple")])
545 (define_insn "altivec_vsubs<VI_char>s"
546 [(set (match_operand:VI 0 "register_operand" "=v")
547 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
548 (match_operand:VI 2 "register_operand" "v")]
550 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
551 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
552 "vsubs<VI_char>s %0,%1,%2"
553 [(set_attr "type" "vecsimple")])
556 (define_insn "altivec_vavgu<VI_char>"
557 [(set (match_operand:VI 0 "register_operand" "=v")
558 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
559 (match_operand:VI 2 "register_operand" "v")]
562 "vavgu<VI_char> %0,%1,%2"
563 [(set_attr "type" "vecsimple")])
565 (define_insn "altivec_vavgs<VI_char>"
566 [(set (match_operand:VI 0 "register_operand" "=v")
567 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
568 (match_operand:VI 2 "register_operand" "v")]
570 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
571 "vavgs<VI_char> %0,%1,%2"
572 [(set_attr "type" "vecsimple")])
574 (define_insn "altivec_vcmpbfp"
575 [(set (match_operand:V4SI 0 "register_operand" "=v")
576 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
577 (match_operand:V4SF 2 "register_operand" "v")]
579 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
581 [(set_attr "type" "veccmp")])
583 (define_insn "*altivec_eq<mode>"
584 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
585 (eq:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
586 (match_operand:VI2 2 "altivec_register_operand" "v")))]
588 "vcmpequ<VI_char> %0,%1,%2"
589 [(set_attr "type" "veccmp")])
591 (define_insn "*altivec_gt<mode>"
592 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
593 (gt:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
594 (match_operand:VI2 2 "altivec_register_operand" "v")))]
596 "vcmpgts<VI_char> %0,%1,%2"
597 [(set_attr "type" "veccmp")])
599 (define_insn "*altivec_gtu<mode>"
600 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
601 (gtu:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
602 (match_operand:VI2 2 "altivec_register_operand" "v")))]
604 "vcmpgtu<VI_char> %0,%1,%2"
605 [(set_attr "type" "veccmp")])
607 (define_insn "*altivec_eqv4sf"
608 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
609 (eq:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
610 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
611 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
613 [(set_attr "type" "veccmp")])
615 (define_insn "*altivec_gtv4sf"
616 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
617 (gt:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
618 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
619 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
621 [(set_attr "type" "veccmp")])
623 (define_insn "*altivec_gev4sf"
624 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
625 (ge:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
626 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
627 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
629 [(set_attr "type" "veccmp")])
631 (define_insn "*altivec_vsel<mode>"
632 [(set (match_operand:VM 0 "altivec_register_operand" "=v")
634 (ne:CC (match_operand:VM 1 "altivec_register_operand" "v")
635 (match_operand:VM 4 "zero_constant" ""))
636 (match_operand:VM 2 "altivec_register_operand" "v")
637 (match_operand:VM 3 "altivec_register_operand" "v")))]
638 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
640 [(set_attr "type" "vecperm")])
642 (define_insn "*altivec_vsel<mode>_uns"
643 [(set (match_operand:VM 0 "altivec_register_operand" "=v")
645 (ne:CCUNS (match_operand:VM 1 "altivec_register_operand" "v")
646 (match_operand:VM 4 "zero_constant" ""))
647 (match_operand:VM 2 "altivec_register_operand" "v")
648 (match_operand:VM 3 "altivec_register_operand" "v")))]
649 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
651 [(set_attr "type" "vecperm")])
653 ;; Fused multiply add.
655 (define_insn "*altivec_fmav4sf4"
656 [(set (match_operand:V4SF 0 "register_operand" "=v")
657 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
658 (match_operand:V4SF 2 "register_operand" "v")
659 (match_operand:V4SF 3 "register_operand" "v")))]
660 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
661 "vmaddfp %0,%1,%2,%3"
662 [(set_attr "type" "vecfloat")])
664 ;; We do multiply as a fused multiply-add with an add of a -0.0 vector.
666 (define_expand "altivec_mulv4sf3"
667 [(set (match_operand:V4SF 0 "register_operand" "")
668 (fma:V4SF (match_operand:V4SF 1 "register_operand" "")
669 (match_operand:V4SF 2 "register_operand" "")
671 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
675 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
676 neg0 = gen_reg_rtx (V4SImode);
677 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
678 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
680 operands[3] = gen_lowpart (V4SFmode, neg0);
683 ;; 32-bit integer multiplication
684 ;; A_high = Operand_0 & 0xFFFF0000 >> 16
685 ;; A_low = Operand_0 & 0xFFFF
686 ;; B_high = Operand_1 & 0xFFFF0000 >> 16
687 ;; B_low = Operand_1 & 0xFFFF
688 ;; result = A_low * B_low + (A_high * B_low + B_high * A_low) << 16
690 ;; (define_insn "mulv4si3"
691 ;; [(set (match_operand:V4SI 0 "register_operand" "=v")
692 ;; (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
693 ;; (match_operand:V4SI 2 "register_operand" "v")))]
694 (define_insn "mulv4si3_p8"
695 [(set (match_operand:V4SI 0 "register_operand" "=v")
696 (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
697 (match_operand:V4SI 2 "register_operand" "v")))]
700 [(set_attr "type" "veccomplex")])
702 (define_expand "mulv4si3"
703 [(use (match_operand:V4SI 0 "register_operand" ""))
704 (use (match_operand:V4SI 1 "register_operand" ""))
705 (use (match_operand:V4SI 2 "register_operand" ""))]
717 if (TARGET_P8_VECTOR)
719 emit_insn (gen_mulv4si3_p8 (operands[0], operands[1], operands[2]));
723 zero = gen_reg_rtx (V4SImode);
724 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
726 sixteen = gen_reg_rtx (V4SImode);
727 emit_insn (gen_altivec_vspltisw (sixteen, gen_rtx_CONST_INT (V4SImode, -16)));
729 swap = gen_reg_rtx (V4SImode);
730 emit_insn (gen_vrotlv4si3 (swap, operands[2], sixteen));
732 one = gen_reg_rtx (V8HImode);
733 convert_move (one, operands[1], 0);
735 two = gen_reg_rtx (V8HImode);
736 convert_move (two, operands[2], 0);
738 small_swap = gen_reg_rtx (V8HImode);
739 convert_move (small_swap, swap, 0);
741 low_product = gen_reg_rtx (V4SImode);
742 emit_insn (gen_altivec_vmulouh (low_product, one, two));
744 high_product = gen_reg_rtx (V4SImode);
745 emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero));
747 emit_insn (gen_vashlv4si3 (high_product, high_product, sixteen));
749 emit_insn (gen_addv4si3 (operands[0], high_product, low_product));
754 (define_expand "mulv8hi3"
755 [(use (match_operand:V8HI 0 "register_operand" ""))
756 (use (match_operand:V8HI 1 "register_operand" ""))
757 (use (match_operand:V8HI 2 "register_operand" ""))]
760 rtx zero = gen_reg_rtx (V8HImode);
762 emit_insn (gen_altivec_vspltish (zero, const0_rtx));
763 emit_insn (gen_altivec_vmladduhm(operands[0], operands[1], operands[2], zero));
768 ;; Fused multiply subtract
769 (define_insn "*altivec_vnmsubfp"
770 [(set (match_operand:V4SF 0 "register_operand" "=v")
772 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
773 (match_operand:V4SF 2 "register_operand" "v")
775 (match_operand:V4SF 3 "register_operand" "v")))))]
776 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
777 "vnmsubfp %0,%1,%2,%3"
778 [(set_attr "type" "vecfloat")])
780 (define_insn "altivec_vmsumu<VI_char>m"
781 [(set (match_operand:V4SI 0 "register_operand" "=v")
782 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
783 (match_operand:VIshort 2 "register_operand" "v")
784 (match_operand:V4SI 3 "register_operand" "v")]
787 "vmsumu<VI_char>m %0,%1,%2,%3"
788 [(set_attr "type" "veccomplex")])
790 (define_insn "altivec_vmsumm<VI_char>m"
791 [(set (match_operand:V4SI 0 "register_operand" "=v")
792 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
793 (match_operand:VIshort 2 "register_operand" "v")
794 (match_operand:V4SI 3 "register_operand" "v")]
797 "vmsumm<VI_char>m %0,%1,%2,%3"
798 [(set_attr "type" "veccomplex")])
800 (define_insn "altivec_vmsumshm"
801 [(set (match_operand:V4SI 0 "register_operand" "=v")
802 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
803 (match_operand:V8HI 2 "register_operand" "v")
804 (match_operand:V4SI 3 "register_operand" "v")]
807 "vmsumshm %0,%1,%2,%3"
808 [(set_attr "type" "veccomplex")])
810 (define_insn "altivec_vmsumuhs"
811 [(set (match_operand:V4SI 0 "register_operand" "=v")
812 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
813 (match_operand:V8HI 2 "register_operand" "v")
814 (match_operand:V4SI 3 "register_operand" "v")]
816 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
818 "vmsumuhs %0,%1,%2,%3"
819 [(set_attr "type" "veccomplex")])
821 (define_insn "altivec_vmsumshs"
822 [(set (match_operand:V4SI 0 "register_operand" "=v")
823 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
824 (match_operand:V8HI 2 "register_operand" "v")
825 (match_operand:V4SI 3 "register_operand" "v")]
827 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
829 "vmsumshs %0,%1,%2,%3"
830 [(set_attr "type" "veccomplex")])
834 (define_insn "umax<mode>3"
835 [(set (match_operand:VI2 0 "register_operand" "=v")
836 (umax:VI2 (match_operand:VI2 1 "register_operand" "v")
837 (match_operand:VI2 2 "register_operand" "v")))]
839 "vmaxu<VI_char> %0,%1,%2"
840 [(set_attr "type" "vecsimple")])
842 (define_insn "smax<mode>3"
843 [(set (match_operand:VI2 0 "register_operand" "=v")
844 (smax:VI2 (match_operand:VI2 1 "register_operand" "v")
845 (match_operand:VI2 2 "register_operand" "v")))]
847 "vmaxs<VI_char> %0,%1,%2"
848 [(set_attr "type" "vecsimple")])
850 (define_insn "*altivec_smaxv4sf3"
851 [(set (match_operand:V4SF 0 "register_operand" "=v")
852 (smax:V4SF (match_operand:V4SF 1 "register_operand" "v")
853 (match_operand:V4SF 2 "register_operand" "v")))]
854 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
856 [(set_attr "type" "veccmp")])
858 (define_insn "umin<mode>3"
859 [(set (match_operand:VI2 0 "register_operand" "=v")
860 (umin:VI2 (match_operand:VI2 1 "register_operand" "v")
861 (match_operand:VI2 2 "register_operand" "v")))]
863 "vminu<VI_char> %0,%1,%2"
864 [(set_attr "type" "vecsimple")])
866 (define_insn "smin<mode>3"
867 [(set (match_operand:VI2 0 "register_operand" "=v")
868 (smin:VI2 (match_operand:VI2 1 "register_operand" "v")
869 (match_operand:VI2 2 "register_operand" "v")))]
871 "vmins<VI_char> %0,%1,%2"
872 [(set_attr "type" "vecsimple")])
874 (define_insn "*altivec_sminv4sf3"
875 [(set (match_operand:V4SF 0 "register_operand" "=v")
876 (smin:V4SF (match_operand:V4SF 1 "register_operand" "v")
877 (match_operand:V4SF 2 "register_operand" "v")))]
878 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
880 [(set_attr "type" "veccmp")])
882 (define_insn "altivec_vmhaddshs"
883 [(set (match_operand:V8HI 0 "register_operand" "=v")
884 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
885 (match_operand:V8HI 2 "register_operand" "v")
886 (match_operand:V8HI 3 "register_operand" "v")]
888 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
890 "vmhaddshs %0,%1,%2,%3"
891 [(set_attr "type" "veccomplex")])
893 (define_insn "altivec_vmhraddshs"
894 [(set (match_operand:V8HI 0 "register_operand" "=v")
895 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
896 (match_operand:V8HI 2 "register_operand" "v")
897 (match_operand:V8HI 3 "register_operand" "v")]
899 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
901 "vmhraddshs %0,%1,%2,%3"
902 [(set_attr "type" "veccomplex")])
904 (define_insn "altivec_vmladduhm"
905 [(set (match_operand:V8HI 0 "register_operand" "=v")
906 (plus:V8HI (mult:V8HI (match_operand:V8HI 1 "register_operand" "v")
907 (match_operand:V8HI 2 "register_operand" "v"))
908 (match_operand:V8HI 3 "register_operand" "v")))]
910 "vmladduhm %0,%1,%2,%3"
911 [(set_attr "type" "veccomplex")])
913 (define_expand "altivec_vmrghb"
914 [(use (match_operand:V16QI 0 "register_operand" ""))
915 (use (match_operand:V16QI 1 "register_operand" ""))
916 (use (match_operand:V16QI 2 "register_operand" ""))]
922 /* Special handling for LE with -maltivec=be. */
923 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
925 v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
926 GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
927 GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
928 GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
929 x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
933 v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
934 GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
935 GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
936 GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
937 x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
940 x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
941 emit_insn (gen_rtx_SET (operands[0], x));
945 (define_insn "*altivec_vmrghb_internal"
946 [(set (match_operand:V16QI 0 "register_operand" "=v")
949 (match_operand:V16QI 1 "register_operand" "v")
950 (match_operand:V16QI 2 "register_operand" "v"))
951 (parallel [(const_int 0) (const_int 16)
952 (const_int 1) (const_int 17)
953 (const_int 2) (const_int 18)
954 (const_int 3) (const_int 19)
955 (const_int 4) (const_int 20)
956 (const_int 5) (const_int 21)
957 (const_int 6) (const_int 22)
958 (const_int 7) (const_int 23)])))]
961 if (BYTES_BIG_ENDIAN)
962 return "vmrghb %0,%1,%2";
964 return "vmrglb %0,%2,%1";
966 [(set_attr "type" "vecperm")])
968 (define_insn "altivec_vmrghb_direct"
969 [(set (match_operand:V16QI 0 "register_operand" "=v")
970 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
971 (match_operand:V16QI 2 "register_operand" "v")]
972 UNSPEC_VMRGH_DIRECT))]
975 [(set_attr "type" "vecperm")])
977 (define_expand "altivec_vmrghh"
978 [(use (match_operand:V8HI 0 "register_operand" ""))
979 (use (match_operand:V8HI 1 "register_operand" ""))
980 (use (match_operand:V8HI 2 "register_operand" ""))]
986 /* Special handling for LE with -maltivec=be. */
987 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
989 v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
990 GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
991 x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
995 v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
996 GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
997 x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1000 x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1001 emit_insn (gen_rtx_SET (operands[0], x));
1005 (define_insn "*altivec_vmrghh_internal"
1006 [(set (match_operand:V8HI 0 "register_operand" "=v")
1009 (match_operand:V8HI 1 "register_operand" "v")
1010 (match_operand:V8HI 2 "register_operand" "v"))
1011 (parallel [(const_int 0) (const_int 8)
1012 (const_int 1) (const_int 9)
1013 (const_int 2) (const_int 10)
1014 (const_int 3) (const_int 11)])))]
1017 if (BYTES_BIG_ENDIAN)
1018 return "vmrghh %0,%1,%2";
1020 return "vmrglh %0,%2,%1";
1022 [(set_attr "type" "vecperm")])
1024 (define_insn "altivec_vmrghh_direct"
1025 [(set (match_operand:V8HI 0 "register_operand" "=v")
1026 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1027 (match_operand:V8HI 2 "register_operand" "v")]
1028 UNSPEC_VMRGH_DIRECT))]
1031 [(set_attr "type" "vecperm")])
1033 (define_expand "altivec_vmrghw"
1034 [(use (match_operand:V4SI 0 "register_operand" ""))
1035 (use (match_operand:V4SI 1 "register_operand" ""))
1036 (use (match_operand:V4SI 2 "register_operand" ""))]
1037 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1042 /* Special handling for LE with -maltivec=be. */
1043 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1045 v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1046 x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
1050 v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1051 x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1054 x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1055 emit_insn (gen_rtx_SET (operands[0], x));
1059 (define_insn "*altivec_vmrghw_internal"
1060 [(set (match_operand:V4SI 0 "register_operand" "=v")
1063 (match_operand:V4SI 1 "register_operand" "v")
1064 (match_operand:V4SI 2 "register_operand" "v"))
1065 (parallel [(const_int 0) (const_int 4)
1066 (const_int 1) (const_int 5)])))]
1067 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1069 if (BYTES_BIG_ENDIAN)
1070 return "vmrghw %0,%1,%2";
1072 return "vmrglw %0,%2,%1";
1074 [(set_attr "type" "vecperm")])
1076 (define_insn "altivec_vmrghw_direct"
1077 [(set (match_operand:V4SI 0 "register_operand" "=v")
1078 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1079 (match_operand:V4SI 2 "register_operand" "v")]
1080 UNSPEC_VMRGH_DIRECT))]
1083 [(set_attr "type" "vecperm")])
1085 (define_insn "*altivec_vmrghsf"
1086 [(set (match_operand:V4SF 0 "register_operand" "=v")
1089 (match_operand:V4SF 1 "register_operand" "v")
1090 (match_operand:V4SF 2 "register_operand" "v"))
1091 (parallel [(const_int 0) (const_int 4)
1092 (const_int 1) (const_int 5)])))]
1093 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1095 if (BYTES_BIG_ENDIAN)
1096 return "vmrghw %0,%1,%2";
1098 return "vmrglw %0,%2,%1";
1100 [(set_attr "type" "vecperm")])
1102 (define_expand "altivec_vmrglb"
1103 [(use (match_operand:V16QI 0 "register_operand" ""))
1104 (use (match_operand:V16QI 1 "register_operand" ""))
1105 (use (match_operand:V16QI 2 "register_operand" ""))]
1111 /* Special handling for LE with -maltivec=be. */
1112 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1114 v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
1115 GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
1116 GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
1117 GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
1118 x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
1122 v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
1123 GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
1124 GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
1125 GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
1126 x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
1129 x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1130 emit_insn (gen_rtx_SET (operands[0], x));
1134 (define_insn "*altivec_vmrglb_internal"
1135 [(set (match_operand:V16QI 0 "register_operand" "=v")
1138 (match_operand:V16QI 1 "register_operand" "v")
1139 (match_operand:V16QI 2 "register_operand" "v"))
1140 (parallel [(const_int 8) (const_int 24)
1141 (const_int 9) (const_int 25)
1142 (const_int 10) (const_int 26)
1143 (const_int 11) (const_int 27)
1144 (const_int 12) (const_int 28)
1145 (const_int 13) (const_int 29)
1146 (const_int 14) (const_int 30)
1147 (const_int 15) (const_int 31)])))]
1150 if (BYTES_BIG_ENDIAN)
1151 return "vmrglb %0,%1,%2";
1153 return "vmrghb %0,%2,%1";
1155 [(set_attr "type" "vecperm")])
1157 (define_insn "altivec_vmrglb_direct"
1158 [(set (match_operand:V16QI 0 "register_operand" "=v")
1159 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1160 (match_operand:V16QI 2 "register_operand" "v")]
1161 UNSPEC_VMRGL_DIRECT))]
1164 [(set_attr "type" "vecperm")])
1166 (define_expand "altivec_vmrglh"
1167 [(use (match_operand:V8HI 0 "register_operand" ""))
1168 (use (match_operand:V8HI 1 "register_operand" ""))
1169 (use (match_operand:V8HI 2 "register_operand" ""))]
1175 /* Special handling for LE with -maltivec=be. */
1176 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1178 v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
1179 GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
1180 x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
1184 v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
1185 GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
1186 x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1189 x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1190 emit_insn (gen_rtx_SET (operands[0], x));
1194 (define_insn "*altivec_vmrglh_internal"
1195 [(set (match_operand:V8HI 0 "register_operand" "=v")
1198 (match_operand:V8HI 1 "register_operand" "v")
1199 (match_operand:V8HI 2 "register_operand" "v"))
1200 (parallel [(const_int 4) (const_int 12)
1201 (const_int 5) (const_int 13)
1202 (const_int 6) (const_int 14)
1203 (const_int 7) (const_int 15)])))]
1206 if (BYTES_BIG_ENDIAN)
1207 return "vmrglh %0,%1,%2";
1209 return "vmrghh %0,%2,%1";
1211 [(set_attr "type" "vecperm")])
1213 (define_insn "altivec_vmrglh_direct"
1214 [(set (match_operand:V8HI 0 "register_operand" "=v")
1215 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1216 (match_operand:V8HI 2 "register_operand" "v")]
1217 UNSPEC_VMRGL_DIRECT))]
1220 [(set_attr "type" "vecperm")])
1222 (define_expand "altivec_vmrglw"
1223 [(use (match_operand:V4SI 0 "register_operand" ""))
1224 (use (match_operand:V4SI 1 "register_operand" ""))
1225 (use (match_operand:V4SI 2 "register_operand" ""))]
1226 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1231 /* Special handling for LE with -maltivec=be. */
1232 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1234 v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1235 x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
1239 v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1240 x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1243 x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1244 emit_insn (gen_rtx_SET (operands[0], x));
1248 (define_insn "*altivec_vmrglw_internal"
1249 [(set (match_operand:V4SI 0 "register_operand" "=v")
1252 (match_operand:V4SI 1 "register_operand" "v")
1253 (match_operand:V4SI 2 "register_operand" "v"))
1254 (parallel [(const_int 2) (const_int 6)
1255 (const_int 3) (const_int 7)])))]
1256 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1258 if (BYTES_BIG_ENDIAN)
1259 return "vmrglw %0,%1,%2";
1261 return "vmrghw %0,%2,%1";
1263 [(set_attr "type" "vecperm")])
1265 (define_insn "altivec_vmrglw_direct"
1266 [(set (match_operand:V4SI 0 "register_operand" "=v")
1267 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1268 (match_operand:V4SI 2 "register_operand" "v")]
1269 UNSPEC_VMRGL_DIRECT))]
1272 [(set_attr "type" "vecperm")])
1274 (define_insn "*altivec_vmrglsf"
1275 [(set (match_operand:V4SF 0 "register_operand" "=v")
1278 (match_operand:V4SF 1 "register_operand" "v")
1279 (match_operand:V4SF 2 "register_operand" "v"))
1280 (parallel [(const_int 2) (const_int 6)
1281 (const_int 3) (const_int 7)])))]
1282 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1284 if (BYTES_BIG_ENDIAN)
1285 return "vmrglw %0,%1,%2";
1287 return "vmrghw %0,%2,%1";
1289 [(set_attr "type" "vecperm")])
1291 ;; Power8 vector merge even/odd
1292 (define_insn "p8_vmrgew"
1293 [(set (match_operand:V4SI 0 "register_operand" "=v")
1296 (match_operand:V4SI 1 "register_operand" "v")
1297 (match_operand:V4SI 2 "register_operand" "v"))
1298 (parallel [(const_int 0) (const_int 4)
1299 (const_int 2) (const_int 6)])))]
1302 if (BYTES_BIG_ENDIAN)
1303 return "vmrgew %0,%1,%2";
1305 return "vmrgow %0,%2,%1";
1307 [(set_attr "type" "vecperm")])
1309 (define_insn "p8_vmrgow"
1310 [(set (match_operand:V4SI 0 "register_operand" "=v")
1313 (match_operand:V4SI 1 "register_operand" "v")
1314 (match_operand:V4SI 2 "register_operand" "v"))
1315 (parallel [(const_int 1) (const_int 5)
1316 (const_int 3) (const_int 7)])))]
1319 if (BYTES_BIG_ENDIAN)
1320 return "vmrgow %0,%1,%2";
1322 return "vmrgew %0,%2,%1";
1324 [(set_attr "type" "vecperm")])
1326 (define_expand "vec_widen_umult_even_v16qi"
1327 [(use (match_operand:V8HI 0 "register_operand" ""))
1328 (use (match_operand:V16QI 1 "register_operand" ""))
1329 (use (match_operand:V16QI 2 "register_operand" ""))]
1332 if (VECTOR_ELT_ORDER_BIG)
1333 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1335 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1339 (define_expand "vec_widen_smult_even_v16qi"
1340 [(use (match_operand:V8HI 0 "register_operand" ""))
1341 (use (match_operand:V16QI 1 "register_operand" ""))
1342 (use (match_operand:V16QI 2 "register_operand" ""))]
1345 if (VECTOR_ELT_ORDER_BIG)
1346 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1348 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1352 (define_expand "vec_widen_umult_even_v8hi"
1353 [(use (match_operand:V4SI 0 "register_operand" ""))
1354 (use (match_operand:V8HI 1 "register_operand" ""))
1355 (use (match_operand:V8HI 2 "register_operand" ""))]
1358 if (VECTOR_ELT_ORDER_BIG)
1359 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1361 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1365 (define_expand "vec_widen_smult_even_v8hi"
1366 [(use (match_operand:V4SI 0 "register_operand" ""))
1367 (use (match_operand:V8HI 1 "register_operand" ""))
1368 (use (match_operand:V8HI 2 "register_operand" ""))]
1371 if (VECTOR_ELT_ORDER_BIG)
1372 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1374 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1378 (define_expand "vec_widen_umult_odd_v16qi"
1379 [(use (match_operand:V8HI 0 "register_operand" ""))
1380 (use (match_operand:V16QI 1 "register_operand" ""))
1381 (use (match_operand:V16QI 2 "register_operand" ""))]
1384 if (VECTOR_ELT_ORDER_BIG)
1385 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1387 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1391 (define_expand "vec_widen_smult_odd_v16qi"
1392 [(use (match_operand:V8HI 0 "register_operand" ""))
1393 (use (match_operand:V16QI 1 "register_operand" ""))
1394 (use (match_operand:V16QI 2 "register_operand" ""))]
1397 if (VECTOR_ELT_ORDER_BIG)
1398 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1400 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1404 (define_expand "vec_widen_umult_odd_v8hi"
1405 [(use (match_operand:V4SI 0 "register_operand" ""))
1406 (use (match_operand:V8HI 1 "register_operand" ""))
1407 (use (match_operand:V8HI 2 "register_operand" ""))]
1410 if (VECTOR_ELT_ORDER_BIG)
1411 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1413 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1417 (define_expand "vec_widen_smult_odd_v8hi"
1418 [(use (match_operand:V4SI 0 "register_operand" ""))
1419 (use (match_operand:V8HI 1 "register_operand" ""))
1420 (use (match_operand:V8HI 2 "register_operand" ""))]
1423 if (VECTOR_ELT_ORDER_BIG)
1424 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1426 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1430 (define_insn "altivec_vmuleub"
1431 [(set (match_operand:V8HI 0 "register_operand" "=v")
1432 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1433 (match_operand:V16QI 2 "register_operand" "v")]
1437 [(set_attr "type" "veccomplex")])
1439 (define_insn "altivec_vmuloub"
1440 [(set (match_operand:V8HI 0 "register_operand" "=v")
1441 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1442 (match_operand:V16QI 2 "register_operand" "v")]
1446 [(set_attr "type" "veccomplex")])
1448 (define_insn "altivec_vmulesb"
1449 [(set (match_operand:V8HI 0 "register_operand" "=v")
1450 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1451 (match_operand:V16QI 2 "register_operand" "v")]
1455 [(set_attr "type" "veccomplex")])
1457 (define_insn "altivec_vmulosb"
1458 [(set (match_operand:V8HI 0 "register_operand" "=v")
1459 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1460 (match_operand:V16QI 2 "register_operand" "v")]
1464 [(set_attr "type" "veccomplex")])
1466 (define_insn "altivec_vmuleuh"
1467 [(set (match_operand:V4SI 0 "register_operand" "=v")
1468 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1469 (match_operand:V8HI 2 "register_operand" "v")]
1473 [(set_attr "type" "veccomplex")])
1475 (define_insn "altivec_vmulouh"
1476 [(set (match_operand:V4SI 0 "register_operand" "=v")
1477 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1478 (match_operand:V8HI 2 "register_operand" "v")]
1482 [(set_attr "type" "veccomplex")])
1484 (define_insn "altivec_vmulesh"
1485 [(set (match_operand:V4SI 0 "register_operand" "=v")
1486 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1487 (match_operand:V8HI 2 "register_operand" "v")]
1491 [(set_attr "type" "veccomplex")])
1493 (define_insn "altivec_vmulosh"
1494 [(set (match_operand:V4SI 0 "register_operand" "=v")
1495 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1496 (match_operand:V8HI 2 "register_operand" "v")]
1500 [(set_attr "type" "veccomplex")])
1503 ;; Vector pack/unpack
1504 (define_insn "altivec_vpkpx"
1505 [(set (match_operand:V8HI 0 "register_operand" "=v")
1506 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
1507 (match_operand:V4SI 2 "register_operand" "v")]
1512 if (VECTOR_ELT_ORDER_BIG)
1513 return \"vpkpx %0,%1,%2\";
1515 return \"vpkpx %0,%2,%1\";
1517 [(set_attr "type" "vecperm")])
1519 (define_insn "altivec_vpks<VI_char>ss"
1520 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1521 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1522 (match_operand:VP 2 "register_operand" "v")]
1523 UNSPEC_VPACK_SIGN_SIGN_SAT))]
1527 if (VECTOR_ELT_ORDER_BIG)
1528 return \"vpks<VI_char>ss %0,%1,%2\";
1530 return \"vpks<VI_char>ss %0,%2,%1\";
1532 [(set_attr "type" "vecperm")])
1534 (define_insn "altivec_vpks<VI_char>us"
1535 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1536 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1537 (match_operand:VP 2 "register_operand" "v")]
1538 UNSPEC_VPACK_SIGN_UNS_SAT))]
1542 if (VECTOR_ELT_ORDER_BIG)
1543 return \"vpks<VI_char>us %0,%1,%2\";
1545 return \"vpks<VI_char>us %0,%2,%1\";
1547 [(set_attr "type" "vecperm")])
1549 (define_insn "altivec_vpku<VI_char>us"
1550 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1551 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1552 (match_operand:VP 2 "register_operand" "v")]
1553 UNSPEC_VPACK_UNS_UNS_SAT))]
1557 if (VECTOR_ELT_ORDER_BIG)
1558 return \"vpku<VI_char>us %0,%1,%2\";
1560 return \"vpku<VI_char>us %0,%2,%1\";
1562 [(set_attr "type" "vecperm")])
1564 (define_insn "altivec_vpku<VI_char>um"
1565 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1566 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1567 (match_operand:VP 2 "register_operand" "v")]
1568 UNSPEC_VPACK_UNS_UNS_MOD))]
1572 if (VECTOR_ELT_ORDER_BIG)
1573 return \"vpku<VI_char>um %0,%1,%2\";
1575 return \"vpku<VI_char>um %0,%2,%1\";
1577 [(set_attr "type" "vecperm")])
1579 (define_insn "altivec_vpku<VI_char>um_direct"
1580 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1581 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1582 (match_operand:VP 2 "register_operand" "v")]
1583 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT))]
1587 if (BYTES_BIG_ENDIAN)
1588 return \"vpku<VI_char>um %0,%1,%2\";
1590 return \"vpku<VI_char>um %0,%2,%1\";
1592 [(set_attr "type" "vecperm")])
1594 (define_insn "*altivec_vrl<VI_char>"
1595 [(set (match_operand:VI2 0 "register_operand" "=v")
1596 (rotate:VI2 (match_operand:VI2 1 "register_operand" "v")
1597 (match_operand:VI2 2 "register_operand" "v")))]
1599 "vrl<VI_char> %0,%1,%2"
1600 [(set_attr "type" "vecsimple")])
1602 (define_insn "altivec_vsl"
1603 [(set (match_operand:V4SI 0 "register_operand" "=v")
1604 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1605 (match_operand:V4SI 2 "register_operand" "v")]
1609 [(set_attr "type" "vecperm")])
1611 (define_insn "altivec_vslo"
1612 [(set (match_operand:V4SI 0 "register_operand" "=v")
1613 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1614 (match_operand:V4SI 2 "register_operand" "v")]
1618 [(set_attr "type" "vecperm")])
1620 (define_insn "*altivec_vsl<VI_char>"
1621 [(set (match_operand:VI2 0 "register_operand" "=v")
1622 (ashift:VI2 (match_operand:VI2 1 "register_operand" "v")
1623 (match_operand:VI2 2 "register_operand" "v")))]
1625 "vsl<VI_char> %0,%1,%2"
1626 [(set_attr "type" "vecsimple")])
1628 (define_insn "*altivec_vsr<VI_char>"
1629 [(set (match_operand:VI2 0 "register_operand" "=v")
1630 (lshiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1631 (match_operand:VI2 2 "register_operand" "v")))]
1633 "vsr<VI_char> %0,%1,%2"
1634 [(set_attr "type" "vecsimple")])
1636 (define_insn "*altivec_vsra<VI_char>"
1637 [(set (match_operand:VI2 0 "register_operand" "=v")
1638 (ashiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1639 (match_operand:VI2 2 "register_operand" "v")))]
1641 "vsra<VI_char> %0,%1,%2"
1642 [(set_attr "type" "vecsimple")])
1644 (define_insn "altivec_vsr"
1645 [(set (match_operand:V4SI 0 "register_operand" "=v")
1646 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1647 (match_operand:V4SI 2 "register_operand" "v")]
1651 [(set_attr "type" "vecperm")])
1653 (define_insn "altivec_vsro"
1654 [(set (match_operand:V4SI 0 "register_operand" "=v")
1655 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1656 (match_operand:V4SI 2 "register_operand" "v")]
1660 [(set_attr "type" "vecperm")])
1662 (define_insn "altivec_vsum4ubs"
1663 [(set (match_operand:V4SI 0 "register_operand" "=v")
1664 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")
1665 (match_operand:V4SI 2 "register_operand" "v")]
1667 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1670 [(set_attr "type" "veccomplex")])
1672 (define_insn "altivec_vsum4s<VI_char>s"
1673 [(set (match_operand:V4SI 0 "register_operand" "=v")
1674 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1675 (match_operand:V4SI 2 "register_operand" "v")]
1677 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1679 "vsum4s<VI_char>s %0,%1,%2"
1680 [(set_attr "type" "veccomplex")])
1682 ;; FIXME: For the following two patterns, the scratch should only be
1683 ;; allocated for !VECTOR_ELT_ORDER_BIG, and the instructions should
1684 ;; be emitted separately.
1685 (define_insn "altivec_vsum2sws"
1686 [(set (match_operand:V4SI 0 "register_operand" "=v")
1687 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1688 (match_operand:V4SI 2 "register_operand" "v")]
1690 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))
1691 (clobber (match_scratch:V4SI 3 "=v"))]
1694 if (VECTOR_ELT_ORDER_BIG)
1695 return "vsum2sws %0,%1,%2";
1697 return "vsldoi %3,%2,%2,12\n\tvsum2sws %3,%1,%3\n\tvsldoi %0,%3,%3,4";
1699 [(set_attr "type" "veccomplex")
1700 (set (attr "length")
1702 (match_test "VECTOR_ELT_ORDER_BIG")
1704 (const_string "12")))])
1706 (define_insn "altivec_vsumsws"
1707 [(set (match_operand:V4SI 0 "register_operand" "=v")
1708 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1709 (match_operand:V4SI 2 "register_operand" "v")]
1711 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))
1712 (clobber (match_scratch:V4SI 3 "=v"))]
1715 if (VECTOR_ELT_ORDER_BIG)
1716 return "vsumsws %0,%1,%2";
1718 return "vspltw %3,%2,0\n\tvsumsws %3,%1,%3\n\tvsldoi %0,%3,%3,12";
1720 [(set_attr "type" "veccomplex")
1721 (set (attr "length")
1723 (match_test "(VECTOR_ELT_ORDER_BIG)")
1725 (const_string "12")))])
1727 (define_insn "altivec_vsumsws_direct"
1728 [(set (match_operand:V4SI 0 "register_operand" "=v")
1729 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1730 (match_operand:V4SI 2 "register_operand" "v")]
1731 UNSPEC_VSUMSWS_DIRECT))
1732 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1735 [(set_attr "type" "veccomplex")])
1737 (define_expand "altivec_vspltb"
1738 [(use (match_operand:V16QI 0 "register_operand" ""))
1739 (use (match_operand:V16QI 1 "register_operand" ""))
1740 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1746 /* Special handling for LE with -maltivec=be. We have to reflect
1747 the actual selected index for the splat in the RTL. */
1748 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1749 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
1751 v = gen_rtvec (1, operands[2]);
1752 x = gen_rtx_VEC_SELECT (QImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1753 x = gen_rtx_VEC_DUPLICATE (V16QImode, x);
1754 emit_insn (gen_rtx_SET (operands[0], x));
1758 (define_insn "*altivec_vspltb_internal"
1759 [(set (match_operand:V16QI 0 "register_operand" "=v")
1760 (vec_duplicate:V16QI
1761 (vec_select:QI (match_operand:V16QI 1 "register_operand" "v")
1763 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
1766 /* For true LE, this adjusts the selected index. For LE with
1767 -maltivec=be, this reverses what was done in the define_expand
1768 because the instruction already has big-endian bias. */
1769 if (!BYTES_BIG_ENDIAN)
1770 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
1772 return "vspltb %0,%1,%2";
1774 [(set_attr "type" "vecperm")])
1776 (define_insn "altivec_vspltb_direct"
1777 [(set (match_operand:V16QI 0 "register_operand" "=v")
1778 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1779 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1780 UNSPEC_VSPLT_DIRECT))]
1783 [(set_attr "type" "vecperm")])
1785 (define_expand "altivec_vsplth"
1786 [(use (match_operand:V8HI 0 "register_operand" ""))
1787 (use (match_operand:V8HI 1 "register_operand" ""))
1788 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1794 /* Special handling for LE with -maltivec=be. We have to reflect
1795 the actual selected index for the splat in the RTL. */
1796 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1797 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
1799 v = gen_rtvec (1, operands[2]);
1800 x = gen_rtx_VEC_SELECT (HImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1801 x = gen_rtx_VEC_DUPLICATE (V8HImode, x);
1802 emit_insn (gen_rtx_SET (operands[0], x));
1806 (define_insn "*altivec_vsplth_internal"
1807 [(set (match_operand:V8HI 0 "register_operand" "=v")
1809 (vec_select:HI (match_operand:V8HI 1 "register_operand" "v")
1811 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
1814 /* For true LE, this adjusts the selected index. For LE with
1815 -maltivec=be, this reverses what was done in the define_expand
1816 because the instruction already has big-endian bias. */
1817 if (!BYTES_BIG_ENDIAN)
1818 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
1820 return "vsplth %0,%1,%2";
1822 [(set_attr "type" "vecperm")])
1824 (define_insn "altivec_vsplth_direct"
1825 [(set (match_operand:V8HI 0 "register_operand" "=v")
1826 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1827 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1828 UNSPEC_VSPLT_DIRECT))]
1831 [(set_attr "type" "vecperm")])
1833 (define_expand "altivec_vspltw"
1834 [(use (match_operand:V4SI 0 "register_operand" ""))
1835 (use (match_operand:V4SI 1 "register_operand" ""))
1836 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1842 /* Special handling for LE with -maltivec=be. We have to reflect
1843 the actual selected index for the splat in the RTL. */
1844 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1845 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1847 v = gen_rtvec (1, operands[2]);
1848 x = gen_rtx_VEC_SELECT (SImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1849 x = gen_rtx_VEC_DUPLICATE (V4SImode, x);
1850 emit_insn (gen_rtx_SET (operands[0], x));
1854 (define_insn "*altivec_vspltw_internal"
1855 [(set (match_operand:V4SI 0 "register_operand" "=v")
1857 (vec_select:SI (match_operand:V4SI 1 "register_operand" "v")
1859 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
1862 /* For true LE, this adjusts the selected index. For LE with
1863 -maltivec=be, this reverses what was done in the define_expand
1864 because the instruction already has big-endian bias. */
1865 if (!BYTES_BIG_ENDIAN)
1866 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1868 return "vspltw %0,%1,%2";
1870 [(set_attr "type" "vecperm")])
1872 (define_insn "altivec_vspltw_direct"
1873 [(set (match_operand:V4SI 0 "register_operand" "=v")
1874 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1875 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1876 UNSPEC_VSPLT_DIRECT))]
1879 [(set_attr "type" "vecperm")])
1881 (define_expand "altivec_vspltsf"
1882 [(use (match_operand:V4SF 0 "register_operand" ""))
1883 (use (match_operand:V4SF 1 "register_operand" ""))
1884 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1890 /* Special handling for LE with -maltivec=be. We have to reflect
1891 the actual selected index for the splat in the RTL. */
1892 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1893 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1895 v = gen_rtvec (1, operands[2]);
1896 x = gen_rtx_VEC_SELECT (SFmode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1897 x = gen_rtx_VEC_DUPLICATE (V4SFmode, x);
1898 emit_insn (gen_rtx_SET (operands[0], x));
1902 (define_insn "*altivec_vspltsf_internal"
1903 [(set (match_operand:V4SF 0 "register_operand" "=v")
1905 (vec_select:SF (match_operand:V4SF 1 "register_operand" "v")
1907 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
1908 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1910 /* For true LE, this adjusts the selected index. For LE with
1911 -maltivec=be, this reverses what was done in the define_expand
1912 because the instruction already has big-endian bias. */
1913 if (!BYTES_BIG_ENDIAN)
1914 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1916 return "vspltw %0,%1,%2";
1918 [(set_attr "type" "vecperm")])
1920 (define_insn "altivec_vspltis<VI_char>"
1921 [(set (match_operand:VI 0 "register_operand" "=v")
1923 (match_operand:QI 1 "s5bit_cint_operand" "i")))]
1925 "vspltis<VI_char> %0,%1"
1926 [(set_attr "type" "vecperm")])
1928 (define_insn "*altivec_vrfiz"
1929 [(set (match_operand:V4SF 0 "register_operand" "=v")
1930 (fix:V4SF (match_operand:V4SF 1 "register_operand" "v")))]
1931 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1933 [(set_attr "type" "vecfloat")])
1935 (define_expand "altivec_vperm_<mode>"
1936 [(set (match_operand:VM 0 "register_operand" "=v")
1937 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
1938 (match_operand:VM 2 "register_operand" "v")
1939 (match_operand:V16QI 3 "register_operand" "v")]
1943 if (!VECTOR_ELT_ORDER_BIG)
1945 altivec_expand_vec_perm_le (operands);
1950 (define_insn "*altivec_vperm_<mode>_internal"
1951 [(set (match_operand:VM 0 "register_operand" "=v")
1952 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
1953 (match_operand:VM 2 "register_operand" "v")
1954 (match_operand:V16QI 3 "register_operand" "v")]
1958 [(set_attr "type" "vecperm")])
1960 (define_insn "altivec_vperm_v8hiv16qi"
1961 [(set (match_operand:V16QI 0 "register_operand" "=v")
1962 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "v")
1963 (match_operand:V8HI 2 "register_operand" "v")
1964 (match_operand:V16QI 3 "register_operand" "v")]
1968 [(set_attr "type" "vecperm")])
1970 (define_expand "altivec_vperm_<mode>_uns"
1971 [(set (match_operand:VM 0 "register_operand" "=v")
1972 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
1973 (match_operand:VM 2 "register_operand" "v")
1974 (match_operand:V16QI 3 "register_operand" "v")]
1978 if (!VECTOR_ELT_ORDER_BIG)
1980 altivec_expand_vec_perm_le (operands);
1985 (define_insn "*altivec_vperm_<mode>_uns_internal"
1986 [(set (match_operand:VM 0 "register_operand" "=v")
1987 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
1988 (match_operand:VM 2 "register_operand" "v")
1989 (match_operand:V16QI 3 "register_operand" "v")]
1993 [(set_attr "type" "vecperm")])
1995 (define_expand "vec_permv16qi"
1996 [(set (match_operand:V16QI 0 "register_operand" "")
1997 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "")
1998 (match_operand:V16QI 2 "register_operand" "")
1999 (match_operand:V16QI 3 "register_operand" "")]
2003 if (!BYTES_BIG_ENDIAN) {
2004 altivec_expand_vec_perm_le (operands);
2009 (define_expand "vec_perm_constv16qi"
2010 [(match_operand:V16QI 0 "register_operand" "")
2011 (match_operand:V16QI 1 "register_operand" "")
2012 (match_operand:V16QI 2 "register_operand" "")
2013 (match_operand:V16QI 3 "" "")]
2016 if (altivec_expand_vec_perm_const (operands))
2022 (define_insn "altivec_vrfip" ; ceil
2023 [(set (match_operand:V4SF 0 "register_operand" "=v")
2024 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2028 [(set_attr "type" "vecfloat")])
2030 (define_insn "altivec_vrfin"
2031 [(set (match_operand:V4SF 0 "register_operand" "=v")
2032 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2036 [(set_attr "type" "vecfloat")])
2038 (define_insn "*altivec_vrfim" ; floor
2039 [(set (match_operand:V4SF 0 "register_operand" "=v")
2040 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2044 [(set_attr "type" "vecfloat")])
2046 (define_insn "altivec_vcfux"
2047 [(set (match_operand:V4SF 0 "register_operand" "=v")
2048 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2049 (match_operand:QI 2 "immediate_operand" "i")]
2053 [(set_attr "type" "vecfloat")])
2055 (define_insn "altivec_vcfsx"
2056 [(set (match_operand:V4SF 0 "register_operand" "=v")
2057 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2058 (match_operand:QI 2 "immediate_operand" "i")]
2062 [(set_attr "type" "vecfloat")])
2064 (define_insn "altivec_vctuxs"
2065 [(set (match_operand:V4SI 0 "register_operand" "=v")
2066 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2067 (match_operand:QI 2 "immediate_operand" "i")]
2069 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2072 [(set_attr "type" "vecfloat")])
2074 (define_insn "altivec_vctsxs"
2075 [(set (match_operand:V4SI 0 "register_operand" "=v")
2076 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2077 (match_operand:QI 2 "immediate_operand" "i")]
2079 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2082 [(set_attr "type" "vecfloat")])
2084 (define_insn "altivec_vlogefp"
2085 [(set (match_operand:V4SF 0 "register_operand" "=v")
2086 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2090 [(set_attr "type" "vecfloat")])
2092 (define_insn "altivec_vexptefp"
2093 [(set (match_operand:V4SF 0 "register_operand" "=v")
2094 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2098 [(set_attr "type" "vecfloat")])
2100 (define_insn "*altivec_vrsqrtefp"
2101 [(set (match_operand:V4SF 0 "register_operand" "=v")
2102 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2104 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2106 [(set_attr "type" "vecfloat")])
2108 (define_insn "altivec_vrefp"
2109 [(set (match_operand:V4SF 0 "register_operand" "=v")
2110 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2112 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2114 [(set_attr "type" "vecfloat")])
2116 (define_expand "altivec_copysign_v4sf3"
2117 [(use (match_operand:V4SF 0 "register_operand" ""))
2118 (use (match_operand:V4SF 1 "register_operand" ""))
2119 (use (match_operand:V4SF 2 "register_operand" ""))]
2120 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2123 rtx mask = gen_reg_rtx (V4SImode);
2124 rtvec v = rtvec_alloc (4);
2125 unsigned HOST_WIDE_INT mask_val = ((unsigned HOST_WIDE_INT)1) << 31;
2127 RTVEC_ELT (v, 0) = GEN_INT (mask_val);
2128 RTVEC_ELT (v, 1) = GEN_INT (mask_val);
2129 RTVEC_ELT (v, 2) = GEN_INT (mask_val);
2130 RTVEC_ELT (v, 3) = GEN_INT (mask_val);
2132 emit_insn (gen_vec_initv4si (mask, gen_rtx_PARALLEL (V4SImode, v)));
2133 emit_insn (gen_vector_select_v4sf (operands[0], operands[1], operands[2],
2134 gen_lowpart (V4SFmode, mask)));
2138 (define_insn "altivec_vsldoi_<mode>"
2139 [(set (match_operand:VM 0 "register_operand" "=v")
2140 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
2141 (match_operand:VM 2 "register_operand" "v")
2142 (match_operand:QI 3 "immediate_operand" "i")]
2145 "vsldoi %0,%1,%2,%3"
2146 [(set_attr "type" "vecperm")])
2148 (define_insn "altivec_vupkhs<VU_char>"
2149 [(set (match_operand:VP 0 "register_operand" "=v")
2150 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2151 UNSPEC_VUNPACK_HI_SIGN))]
2154 if (VECTOR_ELT_ORDER_BIG)
2155 return "vupkhs<VU_char> %0,%1";
2157 return "vupkls<VU_char> %0,%1";
2159 [(set_attr "type" "vecperm")])
2161 (define_insn "*altivec_vupkhs<VU_char>_direct"
2162 [(set (match_operand:VP 0 "register_operand" "=v")
2163 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2164 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2166 "vupkhs<VU_char> %0,%1"
2167 [(set_attr "type" "vecperm")])
2169 (define_insn "altivec_vupkls<VU_char>"
2170 [(set (match_operand:VP 0 "register_operand" "=v")
2171 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2172 UNSPEC_VUNPACK_LO_SIGN))]
2175 if (VECTOR_ELT_ORDER_BIG)
2176 return "vupkls<VU_char> %0,%1";
2178 return "vupkhs<VU_char> %0,%1";
2180 [(set_attr "type" "vecperm")])
2182 (define_insn "*altivec_vupkls<VU_char>_direct"
2183 [(set (match_operand:VP 0 "register_operand" "=v")
2184 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2185 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
2187 "vupkls<VU_char> %0,%1"
2188 [(set_attr "type" "vecperm")])
2190 (define_insn "altivec_vupkhpx"
2191 [(set (match_operand:V4SI 0 "register_operand" "=v")
2192 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2196 if (VECTOR_ELT_ORDER_BIG)
2197 return "vupkhpx %0,%1";
2199 return "vupklpx %0,%1";
2201 [(set_attr "type" "vecperm")])
2203 (define_insn "altivec_vupklpx"
2204 [(set (match_operand:V4SI 0 "register_operand" "=v")
2205 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2209 if (VECTOR_ELT_ORDER_BIG)
2210 return "vupklpx %0,%1";
2212 return "vupkhpx %0,%1";
2214 [(set_attr "type" "vecperm")])
2216 ;; Compare vectors producing a vector result and a predicate, setting CR6 to
2217 ;; indicate a combined status
2218 (define_insn "*altivec_vcmpequ<VI_char>_p"
2220 (unspec:CC [(eq:CC (match_operand:VI2 1 "register_operand" "v")
2221 (match_operand:VI2 2 "register_operand" "v"))]
2223 (set (match_operand:VI2 0 "register_operand" "=v")
2224 (eq:VI2 (match_dup 1)
2227 "vcmpequ<VI_char>. %0,%1,%2"
2228 [(set_attr "type" "veccmp")])
2230 (define_insn "*altivec_vcmpgts<VI_char>_p"
2232 (unspec:CC [(gt:CC (match_operand:VI2 1 "register_operand" "v")
2233 (match_operand:VI2 2 "register_operand" "v"))]
2235 (set (match_operand:VI2 0 "register_operand" "=v")
2236 (gt:VI2 (match_dup 1)
2239 "vcmpgts<VI_char>. %0,%1,%2"
2240 [(set_attr "type" "veccmp")])
2242 (define_insn "*altivec_vcmpgtu<VI_char>_p"
2244 (unspec:CC [(gtu:CC (match_operand:VI2 1 "register_operand" "v")
2245 (match_operand:VI2 2 "register_operand" "v"))]
2247 (set (match_operand:VI2 0 "register_operand" "=v")
2248 (gtu:VI2 (match_dup 1)
2251 "vcmpgtu<VI_char>. %0,%1,%2"
2252 [(set_attr "type" "veccmp")])
2254 (define_insn "*altivec_vcmpeqfp_p"
2256 (unspec:CC [(eq:CC (match_operand:V4SF 1 "register_operand" "v")
2257 (match_operand:V4SF 2 "register_operand" "v"))]
2259 (set (match_operand:V4SF 0 "register_operand" "=v")
2260 (eq:V4SF (match_dup 1)
2262 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2263 "vcmpeqfp. %0,%1,%2"
2264 [(set_attr "type" "veccmp")])
2266 (define_insn "*altivec_vcmpgtfp_p"
2268 (unspec:CC [(gt:CC (match_operand:V4SF 1 "register_operand" "v")
2269 (match_operand:V4SF 2 "register_operand" "v"))]
2271 (set (match_operand:V4SF 0 "register_operand" "=v")
2272 (gt:V4SF (match_dup 1)
2274 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2275 "vcmpgtfp. %0,%1,%2"
2276 [(set_attr "type" "veccmp")])
2278 (define_insn "*altivec_vcmpgefp_p"
2280 (unspec:CC [(ge:CC (match_operand:V4SF 1 "register_operand" "v")
2281 (match_operand:V4SF 2 "register_operand" "v"))]
2283 (set (match_operand:V4SF 0 "register_operand" "=v")
2284 (ge:V4SF (match_dup 1)
2286 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2287 "vcmpgefp. %0,%1,%2"
2288 [(set_attr "type" "veccmp")])
2290 (define_insn "altivec_vcmpbfp_p"
2292 (unspec:CC [(match_operand:V4SF 1 "register_operand" "v")
2293 (match_operand:V4SF 2 "register_operand" "v")]
2295 (set (match_operand:V4SF 0 "register_operand" "=v")
2296 (unspec:V4SF [(match_dup 1)
2299 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)"
2301 [(set_attr "type" "veccmp")])
2303 (define_insn "altivec_mtvscr"
2306 [(match_operand:V4SI 0 "register_operand" "v")] UNSPECV_MTVSCR))]
2309 [(set_attr "type" "vecsimple")])
2311 (define_insn "altivec_mfvscr"
2312 [(set (match_operand:V8HI 0 "register_operand" "=v")
2313 (unspec_volatile:V8HI [(reg:SI 110)] UNSPECV_MFVSCR))]
2316 [(set_attr "type" "vecsimple")])
2318 (define_insn "altivec_dssall"
2319 [(unspec_volatile [(const_int 0)] UNSPECV_DSSALL)]
2322 [(set_attr "type" "vecsimple")])
2324 (define_insn "altivec_dss"
2325 [(unspec_volatile [(match_operand:QI 0 "immediate_operand" "i")]
2329 [(set_attr "type" "vecsimple")])
2331 (define_insn "altivec_dst"
2332 [(unspec [(match_operand 0 "register_operand" "b")
2333 (match_operand:SI 1 "register_operand" "r")
2334 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DST)]
2335 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2337 [(set_attr "type" "vecsimple")])
2339 (define_insn "altivec_dstt"
2340 [(unspec [(match_operand 0 "register_operand" "b")
2341 (match_operand:SI 1 "register_operand" "r")
2342 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTT)]
2343 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2345 [(set_attr "type" "vecsimple")])
2347 (define_insn "altivec_dstst"
2348 [(unspec [(match_operand 0 "register_operand" "b")
2349 (match_operand:SI 1 "register_operand" "r")
2350 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTST)]
2351 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2353 [(set_attr "type" "vecsimple")])
2355 (define_insn "altivec_dststt"
2356 [(unspec [(match_operand 0 "register_operand" "b")
2357 (match_operand:SI 1 "register_operand" "r")
2358 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTSTT)]
2359 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2361 [(set_attr "type" "vecsimple")])
2363 (define_expand "altivec_lvsl"
2364 [(use (match_operand:V16QI 0 "register_operand" ""))
2365 (use (match_operand:V16QI 1 "memory_operand" ""))]
2368 if (VECTOR_ELT_ORDER_BIG)
2369 emit_insn (gen_altivec_lvsl_direct (operands[0], operands[1]));
2373 rtx mask, perm[16], constv, vperm;
2374 mask = gen_reg_rtx (V16QImode);
2375 emit_insn (gen_altivec_lvsl_direct (mask, operands[1]));
2376 for (i = 0; i < 16; ++i)
2377 perm[i] = GEN_INT (i);
2378 constv = gen_rtx_CONST_VECTOR (V16QImode, gen_rtvec_v (16, perm));
2379 constv = force_reg (V16QImode, constv);
2380 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2382 emit_insn (gen_rtx_SET (operands[0], vperm));
2387 (define_insn "altivec_lvsl_direct"
2388 [(set (match_operand:V16QI 0 "register_operand" "=v")
2389 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2393 [(set_attr "type" "vecload")])
2395 (define_expand "altivec_lvsr"
2396 [(use (match_operand:V16QI 0 "register_operand" ""))
2397 (use (match_operand:V16QI 1 "memory_operand" ""))]
2400 if (VECTOR_ELT_ORDER_BIG)
2401 emit_insn (gen_altivec_lvsr_direct (operands[0], operands[1]));
2405 rtx mask, perm[16], constv, vperm;
2406 mask = gen_reg_rtx (V16QImode);
2407 emit_insn (gen_altivec_lvsr_direct (mask, operands[1]));
2408 for (i = 0; i < 16; ++i)
2409 perm[i] = GEN_INT (i);
2410 constv = gen_rtx_CONST_VECTOR (V16QImode, gen_rtvec_v (16, perm));
2411 constv = force_reg (V16QImode, constv);
2412 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2414 emit_insn (gen_rtx_SET (operands[0], vperm));
2419 (define_insn "altivec_lvsr_direct"
2420 [(set (match_operand:V16QI 0 "register_operand" "=v")
2421 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2425 [(set_attr "type" "vecload")])
2427 (define_expand "build_vector_mask_for_load"
2428 [(set (match_operand:V16QI 0 "register_operand" "")
2429 (unspec:V16QI [(match_operand 1 "memory_operand" "")] UNSPEC_LVSR))]
2436 gcc_assert (GET_CODE (operands[1]) == MEM);
2438 addr = XEXP (operands[1], 0);
2439 temp = gen_reg_rtx (GET_MODE (addr));
2440 emit_insn (gen_rtx_SET (temp, gen_rtx_NEG (GET_MODE (addr), addr)));
2441 emit_insn (gen_altivec_lvsr (operands[0],
2442 replace_equiv_address (operands[1], temp)));
2446 ;; Parallel some of the LVE* and STV*'s with unspecs because some have
2447 ;; identical rtl but different instructions-- and gcc gets confused.
2449 (define_expand "altivec_lve<VI_char>x"
2451 [(set (match_operand:VI 0 "register_operand" "=v")
2452 (match_operand:VI 1 "memory_operand" "Z"))
2453 (unspec [(const_int 0)] UNSPEC_LVE)])]
2456 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2458 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_LVE);
2463 (define_insn "*altivec_lve<VI_char>x_internal"
2465 [(set (match_operand:VI 0 "register_operand" "=v")
2466 (match_operand:VI 1 "memory_operand" "Z"))
2467 (unspec [(const_int 0)] UNSPEC_LVE)])]
2469 "lve<VI_char>x %0,%y1"
2470 [(set_attr "type" "vecload")])
2472 (define_insn "*altivec_lvesfx"
2474 [(set (match_operand:V4SF 0 "register_operand" "=v")
2475 (match_operand:V4SF 1 "memory_operand" "Z"))
2476 (unspec [(const_int 0)] UNSPEC_LVE)])]
2479 [(set_attr "type" "vecload")])
2481 (define_expand "altivec_lvxl_<mode>"
2483 [(set (match_operand:VM2 0 "register_operand" "=v")
2484 (match_operand:VM2 1 "memory_operand" "Z"))
2485 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
2488 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2490 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_SET_VSCR);
2495 (define_insn "*altivec_lvxl_<mode>_internal"
2497 [(set (match_operand:VM2 0 "register_operand" "=v")
2498 (match_operand:VM2 1 "memory_operand" "Z"))
2499 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
2502 [(set_attr "type" "vecload")])
2504 (define_expand "altivec_lvx_<mode>"
2506 [(set (match_operand:VM2 0 "register_operand" "=v")
2507 (match_operand:VM2 1 "memory_operand" "Z"))
2508 (unspec [(const_int 0)] UNSPEC_LVX)])]
2511 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2513 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_LVX);
2518 (define_insn "altivec_lvx_<mode>_internal"
2520 [(set (match_operand:VM2 0 "register_operand" "=v")
2521 (match_operand:VM2 1 "memory_operand" "Z"))
2522 (unspec [(const_int 0)] UNSPEC_LVX)])]
2525 [(set_attr "type" "vecload")])
2527 (define_expand "altivec_stvx_<mode>"
2529 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2530 (match_operand:VM2 1 "register_operand" "v"))
2531 (unspec [(const_int 0)] UNSPEC_STVX)])]
2534 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2536 altivec_expand_stvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVX);
2541 (define_insn "altivec_stvx_<mode>_internal"
2543 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2544 (match_operand:VM2 1 "register_operand" "v"))
2545 (unspec [(const_int 0)] UNSPEC_STVX)])]
2548 [(set_attr "type" "vecstore")])
2550 (define_expand "altivec_stvxl_<mode>"
2552 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2553 (match_operand:VM2 1 "register_operand" "v"))
2554 (unspec [(const_int 0)] UNSPEC_STVXL)])]
2557 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2559 altivec_expand_stvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVXL);
2564 (define_insn "*altivec_stvxl_<mode>_internal"
2566 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2567 (match_operand:VM2 1 "register_operand" "v"))
2568 (unspec [(const_int 0)] UNSPEC_STVXL)])]
2571 [(set_attr "type" "vecstore")])
2573 (define_expand "altivec_stve<VI_char>x"
2574 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
2575 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
2578 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2580 altivec_expand_stvex_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVE);
2585 (define_insn "*altivec_stve<VI_char>x_internal"
2586 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
2587 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
2589 "stve<VI_char>x %1,%y0"
2590 [(set_attr "type" "vecstore")])
2592 (define_insn "*altivec_stvesfx"
2593 [(set (match_operand:SF 0 "memory_operand" "=Z")
2594 (unspec:SF [(match_operand:V4SF 1 "register_operand" "v")] UNSPEC_STVE))]
2597 [(set_attr "type" "vecstore")])
2600 ;; xxlxor/vxor SCRATCH0,SCRATCH0,SCRATCH0
2601 ;; vsubu?m SCRATCH2,SCRATCH1,%1
2602 ;; vmaxs? %0,%1,SCRATCH2"
2603 (define_expand "abs<mode>2"
2604 [(set (match_dup 2) (match_dup 3))
2606 (minus:VI2 (match_dup 2)
2607 (match_operand:VI2 1 "register_operand" "v")))
2608 (set (match_operand:VI2 0 "register_operand" "=v")
2609 (smax:VI2 (match_dup 1) (match_dup 4)))]
2612 int i, n_elt = GET_MODE_NUNITS (<MODE>mode);
2613 rtvec v = rtvec_alloc (n_elt);
2615 /* Create an all 0 constant. */
2616 for (i = 0; i < n_elt; ++i)
2617 RTVEC_ELT (v, i) = const0_rtx;
2619 operands[2] = gen_reg_rtx (<MODE>mode);
2620 operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, v);
2621 operands[4] = gen_reg_rtx (<MODE>mode);
2625 ;; vspltisw SCRATCH1,-1
2626 ;; vslw SCRATCH2,SCRATCH1,SCRATCH1
2627 ;; vandc %0,%1,SCRATCH2
2628 (define_expand "altivec_absv4sf2"
2630 (vec_duplicate:V4SI (const_int -1)))
2632 (ashift:V4SI (match_dup 2) (match_dup 2)))
2633 (set (match_operand:V4SF 0 "register_operand" "=v")
2634 (and:V4SF (not:V4SF (subreg:V4SF (match_dup 3) 0))
2635 (match_operand:V4SF 1 "register_operand" "v")))]
2638 operands[2] = gen_reg_rtx (V4SImode);
2639 operands[3] = gen_reg_rtx (V4SImode);
2643 ;; vspltis? SCRATCH0,0
2644 ;; vsubs?s SCRATCH2,SCRATCH1,%1
2645 ;; vmaxs? %0,%1,SCRATCH2"
2646 (define_expand "altivec_abss_<mode>"
2647 [(set (match_dup 2) (vec_duplicate:VI (const_int 0)))
2648 (parallel [(set (match_dup 3)
2649 (unspec:VI [(match_dup 2)
2650 (match_operand:VI 1 "register_operand" "v")]
2652 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))])
2653 (set (match_operand:VI 0 "register_operand" "=v")
2654 (smax:VI (match_dup 1) (match_dup 3)))]
2657 operands[2] = gen_reg_rtx (GET_MODE (operands[0]));
2658 operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
2661 (define_expand "reduc_splus_<mode>"
2662 [(set (match_operand:VIshort 0 "register_operand" "=v")
2663 (unspec:VIshort [(match_operand:VIshort 1 "register_operand" "v")]
2664 UNSPEC_REDUC_PLUS))]
2667 rtx vzero = gen_reg_rtx (V4SImode);
2668 rtx vtmp1 = gen_reg_rtx (V4SImode);
2669 rtx dest = gen_lowpart (V4SImode, operands[0]);
2671 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2672 emit_insn (gen_altivec_vsum4s<VI_char>s (vtmp1, operands[1], vzero));
2673 emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
2677 (define_expand "reduc_uplus_v16qi"
2678 [(set (match_operand:V16QI 0 "register_operand" "=v")
2679 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
2680 UNSPEC_REDUC_PLUS))]
2683 rtx vzero = gen_reg_rtx (V4SImode);
2684 rtx vtmp1 = gen_reg_rtx (V4SImode);
2685 rtx dest = gen_lowpart (V4SImode, operands[0]);
2687 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2688 emit_insn (gen_altivec_vsum4ubs (vtmp1, operands[1], vzero));
2689 emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
2693 (define_expand "neg<mode>2"
2694 [(use (match_operand:VI 0 "register_operand" ""))
2695 (use (match_operand:VI 1 "register_operand" ""))]
2701 vzero = gen_reg_rtx (GET_MODE (operands[0]));
2702 emit_insn (gen_altivec_vspltis<VI_char> (vzero, const0_rtx));
2703 emit_insn (gen_sub<mode>3 (operands[0], vzero, operands[1]));
2708 (define_expand "udot_prod<mode>"
2709 [(set (match_operand:V4SI 0 "register_operand" "=v")
2710 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
2711 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
2712 (match_operand:VIshort 2 "register_operand" "v")]
2717 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], operands[2], operands[3]));
2721 (define_expand "sdot_prodv8hi"
2722 [(set (match_operand:V4SI 0 "register_operand" "=v")
2723 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
2724 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
2725 (match_operand:V8HI 2 "register_operand" "v")]
2730 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], operands[2], operands[3]));
2734 (define_expand "widen_usum<mode>3"
2735 [(set (match_operand:V4SI 0 "register_operand" "=v")
2736 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
2737 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")]
2742 rtx vones = gen_reg_rtx (GET_MODE (operands[1]));
2744 emit_insn (gen_altivec_vspltis<VI_char> (vones, const1_rtx));
2745 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], vones, operands[2]));
2749 (define_expand "widen_ssumv16qi3"
2750 [(set (match_operand:V4SI 0 "register_operand" "=v")
2751 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
2752 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")]
2757 rtx vones = gen_reg_rtx (V16QImode);
2759 emit_insn (gen_altivec_vspltisb (vones, const1_rtx));
2760 emit_insn (gen_altivec_vmsummbm (operands[0], operands[1], vones, operands[2]));
2764 (define_expand "widen_ssumv8hi3"
2765 [(set (match_operand:V4SI 0 "register_operand" "=v")
2766 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
2767 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2772 rtx vones = gen_reg_rtx (V8HImode);
2774 emit_insn (gen_altivec_vspltish (vones, const1_rtx));
2775 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], vones, operands[2]));
2779 (define_expand "vec_unpacks_hi_<VP_small_lc>"
2780 [(set (match_operand:VP 0 "register_operand" "=v")
2781 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2782 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2786 (define_expand "vec_unpacks_lo_<VP_small_lc>"
2787 [(set (match_operand:VP 0 "register_operand" "=v")
2788 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2789 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
2793 (define_insn "vperm_v8hiv4si"
2794 [(set (match_operand:V4SI 0 "register_operand" "=v")
2795 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
2796 (match_operand:V4SI 2 "register_operand" "v")
2797 (match_operand:V16QI 3 "register_operand" "v")]
2801 [(set_attr "type" "vecperm")])
2803 (define_insn "vperm_v16qiv8hi"
2804 [(set (match_operand:V8HI 0 "register_operand" "=v")
2805 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
2806 (match_operand:V8HI 2 "register_operand" "v")
2807 (match_operand:V16QI 3 "register_operand" "v")]
2811 [(set_attr "type" "vecperm")])
2814 (define_expand "vec_unpacku_hi_v16qi"
2815 [(set (match_operand:V8HI 0 "register_operand" "=v")
2816 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
2821 rtx vzero = gen_reg_rtx (V8HImode);
2822 rtx mask = gen_reg_rtx (V16QImode);
2823 rtvec v = rtvec_alloc (16);
2824 bool be = BYTES_BIG_ENDIAN;
2826 emit_insn (gen_altivec_vspltish (vzero, const0_rtx));
2828 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 7);
2829 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 0 : 16);
2830 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 16 : 6);
2831 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 1 : 16);
2832 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 5);
2833 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 2 : 16);
2834 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 16 : 4);
2835 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 3 : 16);
2836 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 3);
2837 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 4 : 16);
2838 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 2);
2839 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 5 : 16);
2840 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 1);
2841 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 6 : 16);
2842 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 : 0);
2843 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 7 : 16);
2845 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2846 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
2850 (define_expand "vec_unpacku_hi_v8hi"
2851 [(set (match_operand:V4SI 0 "register_operand" "=v")
2852 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2857 rtx vzero = gen_reg_rtx (V4SImode);
2858 rtx mask = gen_reg_rtx (V16QImode);
2859 rtvec v = rtvec_alloc (16);
2860 bool be = BYTES_BIG_ENDIAN;
2862 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2864 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 7);
2865 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 17 : 6);
2866 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 0 : 17);
2867 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 1 : 16);
2868 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 5);
2869 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 17 : 4);
2870 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 2 : 17);
2871 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 3 : 16);
2872 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 3);
2873 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 17 : 2);
2874 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 4 : 17);
2875 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 5 : 16);
2876 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 1);
2877 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 : 0);
2878 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 6 : 17);
2879 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 7 : 16);
2881 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2882 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
2886 (define_expand "vec_unpacku_lo_v16qi"
2887 [(set (match_operand:V8HI 0 "register_operand" "=v")
2888 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
2893 rtx vzero = gen_reg_rtx (V8HImode);
2894 rtx mask = gen_reg_rtx (V16QImode);
2895 rtvec v = rtvec_alloc (16);
2896 bool be = BYTES_BIG_ENDIAN;
2898 emit_insn (gen_altivec_vspltish (vzero, const0_rtx));
2900 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
2901 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 8 : 16);
2902 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 16 : 14);
2903 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 9 : 16);
2904 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
2905 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 10 : 16);
2906 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 16 : 12);
2907 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
2908 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
2909 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 12 : 16);
2910 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 10);
2911 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
2912 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 9);
2913 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 14 : 16);
2914 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 : 8);
2915 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);
2917 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2918 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
2922 (define_expand "vec_unpacku_lo_v8hi"
2923 [(set (match_operand:V4SI 0 "register_operand" "=v")
2924 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2929 rtx vzero = gen_reg_rtx (V4SImode);
2930 rtx mask = gen_reg_rtx (V16QImode);
2931 rtvec v = rtvec_alloc (16);
2932 bool be = BYTES_BIG_ENDIAN;
2934 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2936 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
2937 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 17 : 14);
2938 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 8 : 17);
2939 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 9 : 16);
2940 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
2941 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 17 : 12);
2942 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 10 : 17);
2943 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
2944 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
2945 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 17 : 10);
2946 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 12 : 17);
2947 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
2948 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 9);
2949 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 : 8);
2950 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 14 : 17);
2951 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);
2953 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2954 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
2958 (define_expand "vec_widen_umult_hi_v16qi"
2959 [(set (match_operand:V8HI 0 "register_operand" "=v")
2960 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
2961 (match_operand:V16QI 2 "register_operand" "v")]
2966 rtx ve = gen_reg_rtx (V8HImode);
2967 rtx vo = gen_reg_rtx (V8HImode);
2969 if (BYTES_BIG_ENDIAN)
2971 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
2972 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
2973 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
2977 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
2978 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
2979 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
2984 (define_expand "vec_widen_umult_lo_v16qi"
2985 [(set (match_operand:V8HI 0 "register_operand" "=v")
2986 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
2987 (match_operand:V16QI 2 "register_operand" "v")]
2992 rtx ve = gen_reg_rtx (V8HImode);
2993 rtx vo = gen_reg_rtx (V8HImode);
2995 if (BYTES_BIG_ENDIAN)
2997 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
2998 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
2999 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
3003 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3004 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3005 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
3010 (define_expand "vec_widen_smult_hi_v16qi"
3011 [(set (match_operand:V8HI 0 "register_operand" "=v")
3012 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3013 (match_operand:V16QI 2 "register_operand" "v")]
3018 rtx ve = gen_reg_rtx (V8HImode);
3019 rtx vo = gen_reg_rtx (V8HImode);
3021 if (BYTES_BIG_ENDIAN)
3023 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3024 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3025 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
3029 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3030 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3031 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
3036 (define_expand "vec_widen_smult_lo_v16qi"
3037 [(set (match_operand:V8HI 0 "register_operand" "=v")
3038 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3039 (match_operand:V16QI 2 "register_operand" "v")]
3044 rtx ve = gen_reg_rtx (V8HImode);
3045 rtx vo = gen_reg_rtx (V8HImode);
3047 if (BYTES_BIG_ENDIAN)
3049 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3050 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3051 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
3055 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3056 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3057 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
3062 (define_expand "vec_widen_umult_hi_v8hi"
3063 [(set (match_operand:V4SI 0 "register_operand" "=v")
3064 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3065 (match_operand:V8HI 2 "register_operand" "v")]
3070 rtx ve = gen_reg_rtx (V4SImode);
3071 rtx vo = gen_reg_rtx (V4SImode);
3073 if (BYTES_BIG_ENDIAN)
3075 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3076 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3077 emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
3081 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3082 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3083 emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
3088 (define_expand "vec_widen_umult_lo_v8hi"
3089 [(set (match_operand:V4SI 0 "register_operand" "=v")
3090 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3091 (match_operand:V8HI 2 "register_operand" "v")]
3096 rtx ve = gen_reg_rtx (V4SImode);
3097 rtx vo = gen_reg_rtx (V4SImode);
3099 if (BYTES_BIG_ENDIAN)
3101 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3102 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3103 emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
3107 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3108 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3109 emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
3114 (define_expand "vec_widen_smult_hi_v8hi"
3115 [(set (match_operand:V4SI 0 "register_operand" "=v")
3116 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3117 (match_operand:V8HI 2 "register_operand" "v")]
3122 rtx ve = gen_reg_rtx (V4SImode);
3123 rtx vo = gen_reg_rtx (V4SImode);
3125 if (BYTES_BIG_ENDIAN)
3127 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3128 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3129 emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
3133 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3134 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3135 emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
3140 (define_expand "vec_widen_smult_lo_v8hi"
3141 [(set (match_operand:V4SI 0 "register_operand" "=v")
3142 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3143 (match_operand:V8HI 2 "register_operand" "v")]
3148 rtx ve = gen_reg_rtx (V4SImode);
3149 rtx vo = gen_reg_rtx (V4SImode);
3151 if (BYTES_BIG_ENDIAN)
3153 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3154 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3155 emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
3159 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3160 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3161 emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
3166 (define_expand "vec_pack_trunc_<mode>"
3167 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
3168 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
3169 (match_operand:VP 2 "register_operand" "v")]
3170 UNSPEC_VPACK_UNS_UNS_MOD))]
3174 (define_expand "mulv16qi3"
3175 [(set (match_operand:V16QI 0 "register_operand" "=v")
3176 (mult:V16QI (match_operand:V16QI 1 "register_operand" "v")
3177 (match_operand:V16QI 2 "register_operand" "v")))]
3181 rtx even = gen_reg_rtx (V8HImode);
3182 rtx odd = gen_reg_rtx (V8HImode);
3183 rtx mask = gen_reg_rtx (V16QImode);
3184 rtvec v = rtvec_alloc (16);
3187 for (i = 0; i < 8; ++i) {
3188 RTVEC_ELT (v, 2 * i)
3189 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 1 : 31 - 2 * i);
3190 RTVEC_ELT (v, 2 * i + 1)
3191 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 17 : 15 - 2 * i);
3194 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3195 emit_insn (gen_altivec_vmulesb (even, operands[1], operands[2]));
3196 emit_insn (gen_altivec_vmulosb (odd, operands[1], operands[2]));
3197 emit_insn (gen_altivec_vperm_v8hiv16qi (operands[0], even, odd, mask));
3201 (define_expand "altivec_negv4sf2"
3202 [(use (match_operand:V4SF 0 "register_operand" ""))
3203 (use (match_operand:V4SF 1 "register_operand" ""))]
3209 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
3210 neg0 = gen_reg_rtx (V4SImode);
3211 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
3212 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
3215 emit_insn (gen_xorv4sf3 (operands[0],
3216 gen_lowpart (V4SFmode, neg0), operands[1]));
3221 ;; Vector SIMD PEM v2.06c defines LVLX, LVLXL, LVRX, LVRXL,
3222 ;; STVLX, STVLXL, STVVRX, STVRXL are available only on Cell.
3223 (define_insn "altivec_lvlx"
3224 [(set (match_operand:V16QI 0 "register_operand" "=v")
3225 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3227 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3229 [(set_attr "type" "vecload")])
3231 (define_insn "altivec_lvlxl"
3232 [(set (match_operand:V16QI 0 "register_operand" "=v")
3233 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3235 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3237 [(set_attr "type" "vecload")])
3239 (define_insn "altivec_lvrx"
3240 [(set (match_operand:V16QI 0 "register_operand" "=v")
3241 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3243 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3245 [(set_attr "type" "vecload")])
3247 (define_insn "altivec_lvrxl"
3248 [(set (match_operand:V16QI 0 "register_operand" "=v")
3249 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3251 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3253 [(set_attr "type" "vecload")])
3255 (define_insn "altivec_stvlx"
3257 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3258 (match_operand:V16QI 1 "register_operand" "v"))
3259 (unspec [(const_int 0)] UNSPEC_STVLX)])]
3260 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3262 [(set_attr "type" "vecstore")])
3264 (define_insn "altivec_stvlxl"
3266 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3267 (match_operand:V16QI 1 "register_operand" "v"))
3268 (unspec [(const_int 0)] UNSPEC_STVLXL)])]
3269 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3271 [(set_attr "type" "vecstore")])
3273 (define_insn "altivec_stvrx"
3275 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3276 (match_operand:V16QI 1 "register_operand" "v"))
3277 (unspec [(const_int 0)] UNSPEC_STVRX)])]
3278 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3280 [(set_attr "type" "vecstore")])
3282 (define_insn "altivec_stvrxl"
3284 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3285 (match_operand:V16QI 1 "register_operand" "v"))
3286 (unspec [(const_int 0)] UNSPEC_STVRXL)])]
3287 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3289 [(set_attr "type" "vecstore")])
3291 (define_expand "vec_unpacks_float_hi_v8hi"
3292 [(set (match_operand:V4SF 0 "register_operand" "")
3293 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3294 UNSPEC_VUPKHS_V4SF))]
3298 rtx tmp = gen_reg_rtx (V4SImode);
3300 emit_insn (gen_vec_unpacks_hi_v8hi (tmp, operands[1]));
3301 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
3305 (define_expand "vec_unpacks_float_lo_v8hi"
3306 [(set (match_operand:V4SF 0 "register_operand" "")
3307 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3308 UNSPEC_VUPKLS_V4SF))]
3312 rtx tmp = gen_reg_rtx (V4SImode);
3314 emit_insn (gen_vec_unpacks_lo_v8hi (tmp, operands[1]));
3315 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
3319 (define_expand "vec_unpacku_float_hi_v8hi"
3320 [(set (match_operand:V4SF 0 "register_operand" "")
3321 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3322 UNSPEC_VUPKHU_V4SF))]
3326 rtx tmp = gen_reg_rtx (V4SImode);
3328 emit_insn (gen_vec_unpacku_hi_v8hi (tmp, operands[1]));
3329 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
3333 (define_expand "vec_unpacku_float_lo_v8hi"
3334 [(set (match_operand:V4SF 0 "register_operand" "")
3335 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3336 UNSPEC_VUPKLU_V4SF))]
3340 rtx tmp = gen_reg_rtx (V4SImode);
3342 emit_insn (gen_vec_unpacku_lo_v8hi (tmp, operands[1]));
3343 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
3348 ;; Power8 vector instructions encoded as Altivec instructions
3350 ;; Vector count leading zeros
3351 (define_insn "*p8v_clz<mode>2"
3352 [(set (match_operand:VI2 0 "register_operand" "=v")
3353 (clz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
3356 [(set_attr "length" "4")
3357 (set_attr "type" "vecsimple")])
3359 ;; Vector population count
3360 (define_insn "*p8v_popcount<mode>2"
3361 [(set (match_operand:VI2 0 "register_operand" "=v")
3362 (popcount:VI2 (match_operand:VI2 1 "register_operand" "v")))]
3365 [(set_attr "length" "4")
3366 (set_attr "type" "vecsimple")])
3368 ;; Vector Gather Bits by Bytes by Doubleword
3369 (define_insn "p8v_vgbbd"
3370 [(set (match_operand:V16QI 0 "register_operand" "=v")
3371 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
3375 [(set_attr "length" "4")
3376 (set_attr "type" "vecsimple")])
3379 ;; 128-bit binary integer arithmetic
3380 ;; We have a special container type (V1TImode) to allow operations using the
3381 ;; ISA 2.07 128-bit binary support to target the VMX/altivec registers without
3382 ;; having to worry about the register allocator deciding GPRs are better.
3384 (define_insn "altivec_vadduqm"
3385 [(set (match_operand:V1TI 0 "register_operand" "=v")
3386 (plus:V1TI (match_operand:V1TI 1 "register_operand" "v")
3387 (match_operand:V1TI 2 "register_operand" "v")))]
3390 [(set_attr "length" "4")
3391 (set_attr "type" "vecsimple")])
3393 (define_insn "altivec_vaddcuq"
3394 [(set (match_operand:V1TI 0 "register_operand" "=v")
3395 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3396 (match_operand:V1TI 2 "register_operand" "v")]
3400 [(set_attr "length" "4")
3401 (set_attr "type" "vecsimple")])
3403 (define_insn "altivec_vsubuqm"
3404 [(set (match_operand:V1TI 0 "register_operand" "=v")
3405 (minus:V1TI (match_operand:V1TI 1 "register_operand" "v")
3406 (match_operand:V1TI 2 "register_operand" "v")))]
3409 [(set_attr "length" "4")
3410 (set_attr "type" "vecsimple")])
3412 (define_insn "altivec_vsubcuq"
3413 [(set (match_operand:V1TI 0 "register_operand" "=v")
3414 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3415 (match_operand:V1TI 2 "register_operand" "v")]
3419 [(set_attr "length" "4")
3420 (set_attr "type" "vecsimple")])
3422 (define_insn "altivec_vaddeuqm"
3423 [(set (match_operand:V1TI 0 "register_operand" "=v")
3424 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3425 (match_operand:V1TI 2 "register_operand" "v")
3426 (match_operand:V1TI 3 "register_operand" "v")]
3429 "vaddeuqm %0,%1,%2,%3"
3430 [(set_attr "length" "4")
3431 (set_attr "type" "vecsimple")])
3433 (define_insn "altivec_vaddecuq"
3434 [(set (match_operand:V1TI 0 "register_operand" "=v")
3435 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3436 (match_operand:V1TI 2 "register_operand" "v")
3437 (match_operand:V1TI 3 "register_operand" "v")]
3440 "vaddecuq %0,%1,%2,%3"
3441 [(set_attr "length" "4")
3442 (set_attr "type" "vecsimple")])
3444 (define_insn "altivec_vsubeuqm"
3445 [(set (match_operand:V1TI 0 "register_operand" "=v")
3446 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3447 (match_operand:V1TI 2 "register_operand" "v")
3448 (match_operand:V1TI 3 "register_operand" "v")]
3451 "vsubeuqm %0,%1,%2,%3"
3452 [(set_attr "length" "4")
3453 (set_attr "type" "vecsimple")])
3455 (define_insn "altivec_vsubecuq"
3456 [(set (match_operand:V1TI 0 "register_operand" "=v")
3457 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3458 (match_operand:V1TI 2 "register_operand" "v")
3459 (match_operand:V1TI 3 "register_operand" "v")]
3462 "vsubecuq %0,%1,%2,%3"
3463 [(set_attr "length" "4")
3464 (set_attr "type" "vecsimple")])
3466 ;; We use V2DI as the output type to simplify converting the permute
3467 ;; bits into an integer
3468 (define_insn "altivec_vbpermq"
3469 [(set (match_operand:V2DI 0 "register_operand" "=v")
3470 (unspec:V2DI [(match_operand:V16QI 1 "register_operand" "v")
3471 (match_operand:V16QI 2 "register_operand" "v")]
3475 [(set_attr "length" "4")
3476 (set_attr "type" "vecsimple")])
3478 ;; Decimal Integer operations
3479 (define_int_iterator UNSPEC_BCD_ADD_SUB [UNSPEC_BCDADD UNSPEC_BCDSUB])
3481 (define_int_attr bcd_add_sub [(UNSPEC_BCDADD "add")
3482 (UNSPEC_BCDSUB "sub")])
3484 (define_code_iterator BCD_TEST [eq lt gt unordered])
3486 (define_insn "bcd<bcd_add_sub>"
3487 [(set (match_operand:V1TI 0 "register_operand" "")
3488 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "")
3489 (match_operand:V1TI 2 "register_operand" "")
3490 (match_operand:QI 3 "const_0_to_1_operand" "")]
3491 UNSPEC_BCD_ADD_SUB))
3492 (clobber (reg:CCFP 74))]
3494 "bcd<bcd_add_sub>. %0,%1,%2,%3"
3495 [(set_attr "length" "4")
3496 (set_attr "type" "vecsimple")])
3498 ;; Use a floating point type (V2DFmode) for the compare to set CR6 so that we
3499 ;; can use the unordered test for BCD nans and add/subtracts that overflow. An
3500 ;; UNORDERED test on an integer type (like V1TImode) is not defined. The type
3501 ;; probably should be one that can go in the VMX (Altivec) registers, so we
3502 ;; can't use DDmode or DFmode.
3503 (define_insn "*bcd<bcd_add_sub>_test"
3506 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "v")
3507 (match_operand:V1TI 2 "register_operand" "v")
3508 (match_operand:QI 3 "const_0_to_1_operand" "i")]
3510 (match_operand:V2DF 4 "zero_constant" "j")))
3511 (clobber (match_scratch:V1TI 0 "=v"))]
3513 "bcd<bcd_add_sub>. %0,%1,%2,%3"
3514 [(set_attr "length" "4")
3515 (set_attr "type" "vecsimple")])
3517 (define_insn "*bcd<bcd_add_sub>_test2"
3518 [(set (match_operand:V1TI 0 "register_operand" "=v")
3519 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3520 (match_operand:V1TI 2 "register_operand" "v")
3521 (match_operand:QI 3 "const_0_to_1_operand" "i")]
3522 UNSPEC_BCD_ADD_SUB))
3525 (unspec:V2DF [(match_dup 1)
3529 (match_operand:V2DF 4 "zero_constant" "j")))]
3531 "bcd<bcd_add_sub>. %0,%1,%2,%3"
3532 [(set_attr "length" "4")
3533 (set_attr "type" "vecsimple")])
3535 (define_expand "bcd<bcd_add_sub>_<code>"
3536 [(parallel [(set (reg:CCFP 74)
3538 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "")
3539 (match_operand:V1TI 2 "register_operand" "")
3540 (match_operand:QI 3 "const_0_to_1_operand" "")]
3543 (clobber (match_scratch:V1TI 5 ""))])
3544 (set (match_operand:SI 0 "register_operand" "")
3545 (BCD_TEST:SI (reg:CCFP 74)
3549 operands[4] = CONST0_RTX (V2DFmode);
3552 ;; Peephole2 pattern to combine a bcdadd/bcdsub that calculates the value and
3553 ;; the bcdadd/bcdsub that tests the value. The combiner won't work since
3554 ;; CR6 is a hard coded register. Unfortunately, all of the Altivec predicate
3555 ;; support is hard coded to use the fixed register CR6 instead of creating
3556 ;; a register class for CR6.
3559 [(parallel [(set (match_operand:V1TI 0 "register_operand" "")
3560 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "")
3561 (match_operand:V1TI 2 "register_operand" "")
3562 (match_operand:QI 3 "const_0_to_1_operand" "")]
3563 UNSPEC_BCD_ADD_SUB))
3564 (clobber (reg:CCFP 74))])
3565 (parallel [(set (reg:CCFP 74)
3567 (unspec:V2DF [(match_dup 1)
3571 (match_operand:V2DF 4 "zero_constant" "")))
3572 (clobber (match_operand:V1TI 5 "register_operand" ""))])]
3574 [(parallel [(set (match_dup 0)
3575 (unspec:V1TI [(match_dup 1)
3578 UNSPEC_BCD_ADD_SUB))
3581 (unspec:V2DF [(match_dup 1)