2 ;; Copyright (C) 2002-2014 Free Software Foundation, Inc.
3 ;; Contributed by Aldy Hernandez (aldy@quesejoda.com)
5 ;; This file is part of GCC.
7 ;; GCC is free software; you can redistribute it and/or modify it
8 ;; under the terms of the GNU General Public License as published
9 ;; by the Free Software Foundation; either version 3, or (at your
10 ;; option) any later version.
12 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
13 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 ;; License for more details.
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
;; Unspec constant enumeration used by the AltiVec patterns in this file.
;; The _SAT pack variants saturate on overflow while _MOD variants
;; truncate; the _DIRECT variants are presumably raw-instruction forms
;; that bypass endian-aware operand handling -- TODO confirm at use sites.
21 (define_c_enum "unspec"
45 UNSPEC_VPACK_SIGN_SIGN_SAT
46 UNSPEC_VPACK_SIGN_UNS_SAT
47 UNSPEC_VPACK_UNS_UNS_SAT
48 UNSPEC_VPACK_UNS_UNS_MOD
49 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT
71 UNSPEC_VUNPACK_HI_SIGN
72 UNSPEC_VUNPACK_LO_SIGN
73 UNSPEC_VUNPACK_HI_SIGN_DIRECT
74 UNSPEC_VUNPACK_LO_SIGN_DIRECT
138 UNSPEC_VSUMSWS_DIRECT
;; Volatile unspecs: operations with side effects that must not be
;; reordered or deleted (e.g. the VRSAVE accesses defined below).
151 (define_c_enum "unspecv"
;; Integer vector modes handled by the original AltiVec ISA (no V2DI).
160 (define_mode_iterator VI [V4SI V8HI V16QI])
161 ;; Like VI, but add ISA 2.07 integer vector ops
162 (define_mode_iterator VI2 [V4SI V8HI V16QI V2DI])
163 ;; Short vec in modes
164 (define_mode_iterator VIshort [V8HI V16QI])
;; Float vector modes supported by AltiVec (single precision only).
166 (define_mode_iterator VF [V4SF])
167 ;; Vec modes, pity mode iterators are not composable
168 (define_mode_iterator V [V4SI V8HI V16QI V4SF])
169 ;; Vec modes for move/logical/permute ops, include vector types for move not
170 ;; otherwise handled by altivec (v2df, v2di, ti)
171 (define_mode_iterator VM [V4SI V8HI V16QI V4SF V2DF V2DI V1TI TI])
173 ;; Like VM, except don't do TImode
174 (define_mode_iterator VM2 [V4SI V8HI V16QI V4SF V2DF V2DI V1TI])
;; Mnemonic suffix letter selecting the element size (b/h/w/d).
176 (define_mode_attr VI_char [(V2DI "d") (V4SI "w") (V8HI "h") (V16QI "b")])
;; Scalar mode corresponding to one element of each vector mode.
177 (define_mode_attr VI_scalar [(V2DI "DI") (V4SI "SI") (V8HI "HI") (V16QI "QI")])
;; Per-mode enablement condition: classic AltiVec for the 8/16/32-bit
;; element modes, but V2DI requires the Power8 (ISA 2.07) vector unit.
178 (define_mode_attr VI_unit [(V16QI "VECTOR_UNIT_ALTIVEC_P (V16QImode)")
179 (V8HI "VECTOR_UNIT_ALTIVEC_P (V8HImode)")
180 (V4SI "VECTOR_UNIT_ALTIVEC_P (V4SImode)")
181 (V2DI "VECTOR_UNIT_P8_VECTOR_P (V2DImode)")
182 (V1TI "VECTOR_UNIT_ALTIVEC_P (V1TImode)")])
184 ;; Vector pack/unpack
185 (define_mode_iterator VP [V2DI V4SI V8HI])
;; Result mode of packing (elements half as wide, twice as many).
186 (define_mode_attr VP_small [(V2DI "V4SI") (V4SI "V8HI") (V8HI "V16QI")])
;; Lower-case spelling of VP_small, for use in pattern names.
187 (define_mode_attr VP_small_lc [(V2DI "v4si") (V4SI "v8hi") (V8HI "v16qi")])
;; Mnemonic suffix letter for the half-width (packed/unpacked) element.
188 (define_mode_attr VU_char [(V2DI "w") (V4SI "h") (V8HI "b")])
190 ;; Vector move instructions.
;; General move for the VM2 modes between AltiVec registers, memory, and
;; (discouraged, via '*') GPRs.  Visible alternatives: 0 = vector store
;; (stvx), 1 = vector load (lvx), 2 = vector register copy (vor),
;; 6 = easy zero constant (vxor reg with itself), 7 = other easy vector
;; constants via output_vec_const_move.  Cases 3-5 (the GPR/memory
;; alternatives) are handled in the elided part of this switch.
191 (define_insn "*altivec_mov<mode>"
192 [(set (match_operand:VM2 0 "nonimmediate_operand" "=Z,v,v,*Y,*r,*r,v,v")
193 (match_operand:VM2 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
194 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)
195 && (register_operand (operands[0], <MODE>mode)
196 || register_operand (operands[1], <MODE>mode))"
198 switch (which_alternative)
200 case 0: return "stvx %1,%y0";
201 case 1: return "lvx %0,%y1";
202 case 2: return "vor %0,%1,%1";
206 case 6: return "vxor %0,%0,%0";
207 case 7: return output_vec_const_move (operands);
208 default: gcc_unreachable ();
211 [(set_attr "type" "vecstore,vecload,vecsimple,store,load,*,vecsimple,*")])
213 ;; Unlike other altivec moves, allow the GPRs, since a normal use of TImode
214 ;; is for unions. However for plain data movement, slightly favor the vector
;; TImode variant of the move above; '?' (instead of '*') on the GPR
;; alternatives only mildly discourages them.  Same alternative layout:
;; 0 = stvx, 1 = lvx, 2 = vor copy, 6 = vxor zero, 7 = easy constant.
216 (define_insn "*altivec_movti"
217 [(set (match_operand:TI 0 "nonimmediate_operand" "=Z,v,v,?Y,?r,?r,v,v")
218 (match_operand:TI 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
219 "VECTOR_MEM_ALTIVEC_P (TImode)
220 && (register_operand (operands[0], TImode)
221 || register_operand (operands[1], TImode))"
223 switch (which_alternative)
225 case 0: return "stvx %1,%y0";
226 case 1: return "lvx %0,%y1";
227 case 2: return "vor %0,%1,%1";
231 case 6: return "vxor %0,%0,%0";
232 case 7: return output_vec_const_move (operands);
233 default: gcc_unreachable ();
236 [(set_attr "type" "vecstore,vecload,vecsimple,store,load,*,vecsimple,*")])
238 ;; Load up a vector with the most significant bit set by loading up -1 and
239 ;; doing a shift left
;; Post-reload split: build an all-ones vector, then shift it left by
;; itself.  NOTE(review): the shift amount per element is all-ones;
;; AltiVec shift counts are presumably taken modulo the element width,
;; making this a shift by (element bits - 1) -- confirm against the ISA.
;; V4SF is handled through its V4SI bit pattern via gen_lowpart.
241 [(set (match_operand:VM 0 "altivec_register_operand" "")
242 (match_operand:VM 1 "easy_vector_constant_msb" ""))]
243 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
246 rtx dest = operands[0];
247 enum machine_mode mode = GET_MODE (operands[0]);
251 if (mode == V4SFmode)
254 dest = gen_lowpart (V4SImode, dest);
257 num_elements = GET_MODE_NUNITS (mode);
258 v = rtvec_alloc (num_elements);
259 for (i = 0; i < num_elements; i++)
260 RTVEC_ELT (v, i) = constm1_rtx;
262 emit_insn (gen_vec_initv4si (dest, gen_rtx_PARALLEL (mode, v)));
263 emit_insn (gen_rtx_SET (VOIDmode, dest, gen_rtx_ASHIFT (mode, dest, dest)))
;
;; Post-reload split for constants reachable as C = C/2 + C/2: emit the
;; halved constant (operand 3), then add the register to itself
;; (operand 4).  The easy constant from gen_easy_altivec_constant is a
;; VEC_DUPLICATE whose scalar is arithmetically halved (ASHIFTRT by 1)
;; before being folded back into a CONST_VECTOR.  V4SF is again handled
;; through its integer bit pattern via gen_lowpart.
268 [(set (match_operand:VM 0 "altivec_register_operand" "")
269 (match_operand:VM 1 "easy_vector_constant_add_self" ""))]
270 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
271 [(set (match_dup 0) (match_dup 3))
272 (set (match_dup 0) (match_dup 4))]
274 rtx dup = gen_easy_altivec_constant (operands[1]);
276 enum machine_mode op_mode = <MODE>mode;
278 /* Divide the operand of the resulting VEC_DUPLICATE, and use
279 simplify_rtx to make a CONST_VECTOR. */
280 XEXP (dup, 0) = simplify_const_binary_operation (ASHIFTRT, QImode,
281 XEXP (dup, 0), const1_rtx);
282 const_vec = simplify_rtx (dup);
284 if (op_mode == V4SFmode)
287 operands[0] = gen_lowpart (op_mode, operands[0]);
289 if (GET_MODE (const_vec) == op_mode)
290 operands[3] = const_vec;
292 operands[3] = gen_lowpart (op_mode, const_vec);
293 operands[4] = gen_rtx_PLUS (op_mode, operands[0], operands[0]);
;; Read the VRSAVE special-purpose register (SPR 256) into a GPR.
;; Hard register 109 here is presumably VRSAVE -- confirm in rs6000.h.
;; Emits the raw mfspr form or the mfvrsave mnemonic depending on a
;; condition elided from this view (likely an assembler-support flag).
296 (define_insn "get_vrsave_internal"
297 [(set (match_operand:SI 0 "register_operand" "=r")
298 (unspec:SI [(reg:SI 109)] UNSPEC_GET_VRSAVE))]
302 return "mfspr %0,256";
304 return "mfvrsave %0";
306 [(set_attr "type" "*")])
;; Write VRSAVE (SPR 256) from a GPR; volatile unspec so the update is
;; never reordered or removed.  Counterpart of get_vrsave_internal.
308 (define_insn "*set_vrsave_internal"
309 [(match_parallel 0 "vrsave_operation"
311 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r")
312 (reg:SI 109)] UNSPECV_SET_VRSAVE))])]
316 return "mtspr 256,%1";
318 return "mtvrsave %1";
320 [(set_attr "type" "*")])
;; Darwin-only (32-bit Mach-O) calls to the out-of-line "save the world"
;; runtime routine; operand 1 is the routine's symbol.  Register 65 is
;; presumably the link register clobbered by the call -- confirm in
;; rs6000.h register numbering.
322 (define_insn "*save_world"
323 [(match_parallel 0 "save_world_operation"
324 [(clobber (reg:SI 65))
325 (use (match_operand:SI 1 "call_operand" "s"))])]
326 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
328 [(set_attr "type" "branch")
329 (set_attr "length" "4")])
;; Matching "restore the world" epilogue call; additionally clobbers a
;; scratch GPR (operand 2).
331 (define_insn "*restore_world"
332 [(match_parallel 0 "restore_world_operation"
335 (use (match_operand:SI 1 "call_operand" "s"))
336 (clobber (match_operand:SI 2 "gpc_reg_operand" "=r"))])]
337 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
340 ;; The save_vregs and restore_vregs patterns don't use memory_operand
341 ;; because (plus (reg) (const_int)) is not a valid vector address.
342 ;; This way is more compact than describing exactly what happens in
343 ;; the out-of-line functions, ie. loading the constant into r11/r12
344 ;; then using indexed addressing, and requires less editing of rtl
345 ;; to describe the operation to dwarf2out_frame_debug_expr.
;; Call the out-of-line vector-save routine (r11-based variant).
;; Operand 1 is the routine symbol; the parallel's member shown here
;; models one representative V4SI store at base+offset.
346 (define_insn "*save_vregs_<mode>_r11"
347 [(match_parallel 0 "any_parallel_operand"
348 [(clobber (reg:P 65))
349 (use (match_operand:P 1 "symbol_ref_operand" "s"))
352 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
353 (match_operand:P 3 "short_cint_operand" "I")))
354 (match_operand:V4SI 4 "gpc_reg_operand" "v"))])]
357 [(set_attr "type" "branch")
358 (set_attr "length" "4")])
;; Same as above but for the r12-based out-of-line save routine.
360 (define_insn "*save_vregs_<mode>_r12"
361 [(match_parallel 0 "any_parallel_operand"
362 [(clobber (reg:P 65))
363 (use (match_operand:P 1 "symbol_ref_operand" "s"))
366 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
367 (match_operand:P 3 "short_cint_operand" "I")))
368 (match_operand:V4SI 4 "gpc_reg_operand" "v"))])]
371 [(set_attr "type" "branch")
372 (set_attr "length" "4")])
;; Out-of-line vector restore, r11-based: loads instead of stores.
374 (define_insn "*restore_vregs_<mode>_r11"
375 [(match_parallel 0 "any_parallel_operand"
376 [(clobber (reg:P 65))
377 (use (match_operand:P 1 "symbol_ref_operand" "s"))
380 (set (match_operand:V4SI 2 "gpc_reg_operand" "=v")
381 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
382 (match_operand:P 4 "short_cint_operand" "I"))))])]
385 [(set_attr "type" "branch")
386 (set_attr "length" "4")])
;; Out-of-line vector restore, r12-based variant.
388 (define_insn "*restore_vregs_<mode>_r12"
389 [(match_parallel 0 "any_parallel_operand"
390 [(clobber (reg:P 65))
391 (use (match_operand:P 1 "symbol_ref_operand" "s"))
394 (set (match_operand:V4SI 2 "gpc_reg_operand" "=v")
395 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
396 (match_operand:P 4 "short_cint_operand" "I"))))])]
399 [(set_attr "type" "branch")
400 (set_attr "length" "4")])
402 ;; Simple binary operations.
;; Standard-named modulo vector add; maps to vaddu<b/h/w/d>m.
405 (define_insn "add<mode>3"
406 [(set (match_operand:VI2 0 "register_operand" "=v")
407 (plus:VI2 (match_operand:VI2 1 "register_operand" "v")
408 (match_operand:VI2 2 "register_operand" "v")))]
410 "vaddu<VI_char>m %0,%1,%2"
411 [(set_attr "type" "vecsimple")])
;; Single-precision vector float add (output template elided here;
;; presumably vaddfp).
413 (define_insn "*altivec_addv4sf3"
414 [(set (match_operand:V4SF 0 "register_operand" "=v")
415 (plus:V4SF (match_operand:V4SF 1 "register_operand" "v")
416 (match_operand:V4SF 2 "register_operand" "v")))]
417 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
419 [(set_attr "type" "vecfloat")])
;; vaddcuw: word-wise carry-out of an unsigned add, kept as an unspec.
421 (define_insn "altivec_vaddcuw"
422 [(set (match_operand:V4SI 0 "register_operand" "=v")
423 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
424 (match_operand:V4SI 2 "register_operand" "v")]
426 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
428 [(set_attr "type" "vecsimple")])
;; Saturating unsigned add; also writes VSCR (hard reg 110) to record
;; saturation via the UNSPEC_SET_VSCR side effect.
430 (define_insn "altivec_vaddu<VI_char>s"
431 [(set (match_operand:VI 0 "register_operand" "=v")
432 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
433 (match_operand:VI 2 "register_operand" "v")]
435 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
437 "vaddu<VI_char>s %0,%1,%2"
438 [(set_attr "type" "vecsimple")])
;; Saturating signed add; same VSCR side effect as the unsigned form.
440 (define_insn "altivec_vadds<VI_char>s"
441 [(set (match_operand:VI 0 "register_operand" "=v")
442 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
443 (match_operand:VI 2 "register_operand" "v")]
445 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
446 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
447 "vadds<VI_char>s %0,%1,%2"
448 [(set_attr "type" "vecsimple")])
;; Standard-named modulo vector subtract; maps to vsubu<b/h/w/d>m.
451 (define_insn "sub<mode>3"
452 [(set (match_operand:VI2 0 "register_operand" "=v")
453 (minus:VI2 (match_operand:VI2 1 "register_operand" "v")
454 (match_operand:VI2 2 "register_operand" "v")))]
456 "vsubu<VI_char>m %0,%1,%2"
457 [(set_attr "type" "vecsimple")])
;; Single-precision vector float subtract (output template elided;
;; presumably vsubfp).
459 (define_insn "*altivec_subv4sf3"
460 [(set (match_operand:V4SF 0 "register_operand" "=v")
461 (minus:V4SF (match_operand:V4SF 1 "register_operand" "v")
462 (match_operand:V4SF 2 "register_operand" "v")))]
463 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
465 [(set_attr "type" "vecfloat")])
;; vsubcuw: word-wise borrow/carry result of unsigned subtract.
467 (define_insn "altivec_vsubcuw"
468 [(set (match_operand:V4SI 0 "register_operand" "=v")
469 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
470 (match_operand:V4SI 2 "register_operand" "v")]
472 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
474 [(set_attr "type" "vecsimple")])
;; Saturating unsigned subtract; records saturation in VSCR (reg 110).
476 (define_insn "altivec_vsubu<VI_char>s"
477 [(set (match_operand:VI 0 "register_operand" "=v")
478 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
479 (match_operand:VI 2 "register_operand" "v")]
481 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
482 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
483 "vsubu<VI_char>s %0,%1,%2"
484 [(set_attr "type" "vecsimple")])
;; Saturating signed subtract; same VSCR side effect.
486 (define_insn "altivec_vsubs<VI_char>s"
487 [(set (match_operand:VI 0 "register_operand" "=v")
488 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
489 (match_operand:VI 2 "register_operand" "v")]
491 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
492 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
493 "vsubs<VI_char>s %0,%1,%2"
494 [(set_attr "type" "vecsimple")])
;; Unsigned vector average (vavgu<b/h/w>), expressed as an unspec.
497 (define_insn "altivec_vavgu<VI_char>"
498 [(set (match_operand:VI 0 "register_operand" "=v")
499 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
500 (match_operand:VI 2 "register_operand" "v")]
503 "vavgu<VI_char> %0,%1,%2"
504 [(set_attr "type" "vecsimple")])
;; Signed vector average (vavgs<b/h/w>).
506 (define_insn "altivec_vavgs<VI_char>"
507 [(set (match_operand:VI 0 "register_operand" "=v")
508 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
509 (match_operand:VI 2 "register_operand" "v")]
511 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
512 "vavgs<VI_char> %0,%1,%2"
513 [(set_attr "type" "vecsimple")])
;; vcmpbfp: float "bounds" compare producing a V4SI result; kept opaque
;; as an unspec (output template elided here).
515 (define_insn "altivec_vcmpbfp"
516 [(set (match_operand:V4SI 0 "register_operand" "=v")
517 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
518 (match_operand:V4SF 2 "register_operand" "v")]
520 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
522 [(set_attr "type" "veccmp")])
;; Element-wise integer equality producing an all-ones/all-zeros mask.
524 (define_insn "*altivec_eq<mode>"
525 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
526 (eq:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
527 (match_operand:VI2 2 "altivec_register_operand" "v")))]
529 "vcmpequ<VI_char> %0,%1,%2"
530 [(set_attr "type" "veccmp")])
;; Signed greater-than mask (vcmpgts*).
532 (define_insn "*altivec_gt<mode>"
533 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
534 (gt:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
535 (match_operand:VI2 2 "altivec_register_operand" "v")))]
537 "vcmpgts<VI_char> %0,%1,%2"
538 [(set_attr "type" "veccmp")])
;; Unsigned greater-than mask (vcmpgtu*).
540 (define_insn "*altivec_gtu<mode>"
541 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
542 (gtu:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
543 (match_operand:VI2 2 "altivec_register_operand" "v")))]
545 "vcmpgtu<VI_char> %0,%1,%2"
546 [(set_attr "type" "veccmp")])
;; Float comparisons producing V4SF-typed masks; output templates are
;; elided here (presumably vcmpeqfp / vcmpgtfp / vcmpgefp).
548 (define_insn "*altivec_eqv4sf"
549 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
550 (eq:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
551 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
552 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
554 [(set_attr "type" "veccmp")])
556 (define_insn "*altivec_gtv4sf"
557 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
558 (gt:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
559 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
560 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
562 [(set_attr "type" "veccmp")])
564 (define_insn "*altivec_gev4sf"
565 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
566 (ge:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
567 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
568 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
570 [(set_attr "type" "veccmp")])
;; Vector select: per-element choice between operands 2 and 3 driven by
;; mask operand 1 tested against zero (ne).  Output template elided
;; (presumably vsel).  Signed-compare (CC) flavor.
572 (define_insn "*altivec_vsel<mode>"
573 [(set (match_operand:VM 0 "altivec_register_operand" "=v")
575 (ne:CC (match_operand:VM 1 "altivec_register_operand" "v")
576 (match_operand:VM 4 "zero_constant" ""))
577 (match_operand:VM 2 "altivec_register_operand" "v")
578 (match_operand:VM 3 "altivec_register_operand" "v")))]
579 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
581 [(set_attr "type" "vecperm")])
;; Same selection, but with an unsigned-compare (CCUNS) condition mode.
583 (define_insn "*altivec_vsel<mode>_uns"
584 [(set (match_operand:VM 0 "altivec_register_operand" "=v")
586 (ne:CCUNS (match_operand:VM 1 "altivec_register_operand" "v")
587 (match_operand:VM 4 "zero_constant" ""))
588 (match_operand:VM 2 "altivec_register_operand" "v")
589 (match_operand:VM 3 "altivec_register_operand" "v")))]
590 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
592 [(set_attr "type" "vecperm")])
594 ;; Fused multiply add.
;; Single fused multiply-add: %0 = %1 * %2 + %3 (vmaddfp).
596 (define_insn "*altivec_fmav4sf4"
597 [(set (match_operand:V4SF 0 "register_operand" "=v")
598 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
599 (match_operand:V4SF 2 "register_operand" "v")
600 (match_operand:V4SF 3 "register_operand" "v")))]
601 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
602 "vmaddfp %0,%1,%2,%3"
603 [(set_attr "type" "vecfloat")])
605 ;; We do multiply as a fused multiply-add with an add of a -0.0 vector.
;; The -0.0 addend (sign bit only) is built by splatting -1 into each
;; word and shifting it left, leaving 0x80000000 per lane; it is then
;; reinterpreted as V4SF and used as operand 3 of the fma above.
607 (define_expand "altivec_mulv4sf3"
608 [(set (match_operand:V4SF 0 "register_operand" "")
609 (fma:V4SF (match_operand:V4SF 1 "register_operand" "")
610 (match_operand:V4SF 2 "register_operand" "")
612 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
616 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
617 neg0 = gen_reg_rtx (V4SImode);
618 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
619 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
621 operands[3] = gen_lowpart (V4SFmode, neg0);
624 ;; 32-bit integer multiplication
625 ;; A_high = Operand_0 & 0xFFFF0000 >> 16
626 ;; A_low = Operand_0 & 0xFFFF
627 ;; B_high = Operand_1 & 0xFFFF0000 >> 16
628 ;; B_low = Operand_1 & 0xFFFF
629 ;; result = A_low * B_low + (A_high * B_low + B_high * A_low) << 16
631 ;; (define_insn "mulv4si3"
632 ;; [(set (match_operand:V4SI 0 "register_operand" "=v")
633 ;; (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
634 ;; (match_operand:V4SI 2 "register_operand" "v")))]
;; Expander synthesizing V4SI multiply from 16-bit halfword multiplies:
;; vmulouh for the low products, vmsumuhm against a rotated copy of
;; operand 2 for the cross products, shifted left 16 and summed.
;; NOTE(review): gen_rtx_CONST_INT conventionally takes VOIDmode; the
;; V4SImode argument below looks suspicious -- confirm it is benign.
635 (define_expand "mulv4si3"
636 [(use (match_operand:V4SI 0 "register_operand" ""))
637 (use (match_operand:V4SI 1 "register_operand" ""))
638 (use (match_operand:V4SI 2 "register_operand" ""))]
651 zero = gen_reg_rtx (V4SImode);
652 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
654 sixteen = gen_reg_rtx (V4SImode);
655 emit_insn (gen_altivec_vspltisw (sixteen, gen_rtx_CONST_INT (V4SImode, -16)));
657 swap = gen_reg_rtx (V4SImode);
658 emit_insn (gen_vrotlv4si3 (swap, operands[2], sixteen));
660 one = gen_reg_rtx (V8HImode);
661 convert_move (one, operands[1], 0);
663 two = gen_reg_rtx (V8HImode);
664 convert_move (two, operands[2], 0);
666 small_swap = gen_reg_rtx (V8HImode);
667 convert_move (small_swap, swap, 0);
669 low_product = gen_reg_rtx (V4SImode);
670 emit_insn (gen_altivec_vmulouh (low_product, one, two));
672 high_product = gen_reg_rtx (V4SImode);
673 emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero));
675 emit_insn (gen_vashlv4si3 (high_product, high_product, sixteen));
677 emit_insn (gen_addv4si3 (operands[0], high_product, low_product));
;; V8HI multiply via even/odd signed halfword multiplies (vmulesh /
;; vmulosh), merged back with vmrghw/vmrglw and packed with vpkuwum.
;; The operand order of the multiplies and merges is swapped between
;; big- and little-endian so the _direct (raw-instruction) forms pick
;; the correct element lanes.
682 (define_expand "mulv8hi3"
683 [(use (match_operand:V8HI 0 "register_operand" ""))
684 (use (match_operand:V8HI 1 "register_operand" ""))
685 (use (match_operand:V8HI 2 "register_operand" ""))]
689 rtx odd = gen_reg_rtx (V4SImode);
690 rtx even = gen_reg_rtx (V4SImode);
691 rtx high = gen_reg_rtx (V4SImode);
692 rtx low = gen_reg_rtx (V4SImode);
694 if (BYTES_BIG_ENDIAN)
696 emit_insn (gen_altivec_vmulesh (even, operands[1], operands[2]));
697 emit_insn (gen_altivec_vmulosh (odd, operands[1], operands[2]));
698 emit_insn (gen_altivec_vmrghw_direct (high, even, odd));
699 emit_insn (gen_altivec_vmrglw_direct (low, even, odd));
700 emit_insn (gen_altivec_vpkuwum_direct (operands[0], high, low));
704 emit_insn (gen_altivec_vmulosh (even, operands[1], operands[2]));
705 emit_insn (gen_altivec_vmulesh (odd, operands[1], operands[2]));
706 emit_insn (gen_altivec_vmrghw_direct (high, odd, even));
707 emit_insn (gen_altivec_vmrglw_direct (low, odd, even));
708 emit_insn (gen_altivec_vpkuwum_direct (operands[0], low, high));
714 ;; Fused multiply subtract
;; vnmsubfp: negated multiply-subtract, %0 = -(%1 * %2 - %3); the
;; surrounding neg/neg RTL wrappers are partly elided in this view.
715 (define_insn "*altivec_vnmsubfp"
716 [(set (match_operand:V4SF 0 "register_operand" "=v")
718 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
719 (match_operand:V4SF 2 "register_operand" "v")
721 (match_operand:V4SF 3 "register_operand" "v")))))]
722 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
723 "vnmsubfp %0,%1,%2,%3"
724 [(set_attr "type" "vecfloat")])
;; Multiply-sum family: multiply narrow elements of operands 1 and 2,
;; accumulate into the V4SI words of operand 3.  All are unspecs since
;; the lane-wise reduction has no direct RTL representation.
;; Unsigned modulo multiply-sum (vmsumubm / vmsumuhm).
726 (define_insn "altivec_vmsumu<VI_char>m"
727 [(set (match_operand:V4SI 0 "register_operand" "=v")
728 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
729 (match_operand:VIshort 2 "register_operand" "v")
730 (match_operand:V4SI 3 "register_operand" "v")]
733 "vmsumu<VI_char>m %0,%1,%2,%3"
734 [(set_attr "type" "veccomplex")])
;; Mixed-sign modulo multiply-sum (vmsummbm / vmsummhm).
736 (define_insn "altivec_vmsumm<VI_char>m"
737 [(set (match_operand:V4SI 0 "register_operand" "=v")
738 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
739 (match_operand:VIshort 2 "register_operand" "v")
740 (match_operand:V4SI 3 "register_operand" "v")]
743 "vmsumm<VI_char>m %0,%1,%2,%3"
744 [(set_attr "type" "veccomplex")])
;; Signed halfword modulo multiply-sum.
746 (define_insn "altivec_vmsumshm"
747 [(set (match_operand:V4SI 0 "register_operand" "=v")
748 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
749 (match_operand:V8HI 2 "register_operand" "v")
750 (match_operand:V4SI 3 "register_operand" "v")]
753 "vmsumshm %0,%1,%2,%3"
754 [(set_attr "type" "veccomplex")])
;; Saturating unsigned halfword multiply-sum; writes VSCR (reg 110).
756 (define_insn "altivec_vmsumuhs"
757 [(set (match_operand:V4SI 0 "register_operand" "=v")
758 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
759 (match_operand:V8HI 2 "register_operand" "v")
760 (match_operand:V4SI 3 "register_operand" "v")]
762 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
764 "vmsumuhs %0,%1,%2,%3"
765 [(set_attr "type" "veccomplex")])
;; Saturating signed halfword multiply-sum; also sets VSCR.
767 (define_insn "altivec_vmsumshs"
768 [(set (match_operand:V4SI 0 "register_operand" "=v")
769 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
770 (match_operand:V8HI 2 "register_operand" "v")
771 (match_operand:V4SI 3 "register_operand" "v")]
773 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
775 "vmsumshs %0,%1,%2,%3"
776 [(set_attr "type" "veccomplex")])
;; Standard-named element-wise max/min, unsigned and signed integer
;; plus single-float variants (float output templates elided here).
780 (define_insn "umax<mode>3"
781 [(set (match_operand:VI2 0 "register_operand" "=v")
782 (umax:VI2 (match_operand:VI2 1 "register_operand" "v")
783 (match_operand:VI2 2 "register_operand" "v")))]
785 "vmaxu<VI_char> %0,%1,%2"
786 [(set_attr "type" "vecsimple")])
788 (define_insn "smax<mode>3"
789 [(set (match_operand:VI2 0 "register_operand" "=v")
790 (smax:VI2 (match_operand:VI2 1 "register_operand" "v")
791 (match_operand:VI2 2 "register_operand" "v")))]
793 "vmaxs<VI_char> %0,%1,%2"
794 [(set_attr "type" "vecsimple")])
796 (define_insn "*altivec_smaxv4sf3"
797 [(set (match_operand:V4SF 0 "register_operand" "=v")
798 (smax:V4SF (match_operand:V4SF 1 "register_operand" "v")
799 (match_operand:V4SF 2 "register_operand" "v")))]
800 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
802 [(set_attr "type" "veccmp")])
804 (define_insn "umin<mode>3"
805 [(set (match_operand:VI2 0 "register_operand" "=v")
806 (umin:VI2 (match_operand:VI2 1 "register_operand" "v")
807 (match_operand:VI2 2 "register_operand" "v")))]
809 "vminu<VI_char> %0,%1,%2"
810 [(set_attr "type" "vecsimple")])
812 (define_insn "smin<mode>3"
813 [(set (match_operand:VI2 0 "register_operand" "=v")
814 (smin:VI2 (match_operand:VI2 1 "register_operand" "v")
815 (match_operand:VI2 2 "register_operand" "v")))]
817 "vmins<VI_char> %0,%1,%2"
818 [(set_attr "type" "vecsimple")])
820 (define_insn "*altivec_sminv4sf3"
821 [(set (match_operand:V4SF 0 "register_operand" "=v")
822 (smin:V4SF (match_operand:V4SF 1 "register_operand" "v")
823 (match_operand:V4SF 2 "register_operand" "v")))]
824 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
826 [(set_attr "type" "veccmp")])
;; Halfword multiply-high-and-add variants (three vector inputs).
;; vmhaddshs: signed multiply-high, add, saturate; writes VSCR (110).
828 (define_insn "altivec_vmhaddshs"
829 [(set (match_operand:V8HI 0 "register_operand" "=v")
830 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
831 (match_operand:V8HI 2 "register_operand" "v")
832 (match_operand:V8HI 3 "register_operand" "v")]
834 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
836 "vmhaddshs %0,%1,%2,%3"
837 [(set_attr "type" "veccomplex")])
;; vmhraddshs: rounding variant of the above; also sets VSCR.
839 (define_insn "altivec_vmhraddshs"
840 [(set (match_operand:V8HI 0 "register_operand" "=v")
841 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
842 (match_operand:V8HI 2 "register_operand" "v")
843 (match_operand:V8HI 3 "register_operand" "v")]
845 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
847 "vmhraddshs %0,%1,%2,%3"
848 [(set_attr "type" "veccomplex")])
;; vmladduhm: unsigned multiply-low-and-add, modulo (no VSCR update).
850 (define_insn "altivec_vmladduhm"
851 [(set (match_operand:V8HI 0 "register_operand" "=v")
852 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
853 (match_operand:V8HI 2 "register_operand" "v")
854 (match_operand:V8HI 3 "register_operand" "v")]
857 "vmladduhm %0,%1,%2,%3"
858 [(set_attr "type" "veccomplex")])
;; Merge-high byte.  The expander builds an endian-correct VEC_SELECT
;; over the 32-byte concatenation of the two inputs: for LE with
;; -maltivec=be the operands are swapped and the low-half indices are
;; chosen so the architected (BE) element order is preserved.
860 (define_expand "altivec_vmrghb"
861 [(use (match_operand:V16QI 0 "register_operand" ""))
862 (use (match_operand:V16QI 1 "register_operand" ""))
863 (use (match_operand:V16QI 2 "register_operand" ""))]
869 /* Special handling for LE with -maltivec=be. */
870 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
872 v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
873 GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
874 GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
875 GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
876 x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
880 v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
881 GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
882 GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
883 GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
884 x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
887 x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
888 emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
;; Recognizer for the canonical merge-high-byte VEC_SELECT; on LE the
;; same RTL is implemented by vmrglb with swapped operands.
892 (define_insn "*altivec_vmrghb_internal"
893 [(set (match_operand:V16QI 0 "register_operand" "=v")
896 (match_operand:V16QI 1 "register_operand" "v")
897 (match_operand:V16QI 2 "register_operand" "v"))
898 (parallel [(const_int 0) (const_int 16)
899 (const_int 1) (const_int 17)
900 (const_int 2) (const_int 18)
901 (const_int 3) (const_int 19)
902 (const_int 4) (const_int 20)
903 (const_int 5) (const_int 21)
904 (const_int 6) (const_int 22)
905 (const_int 7) (const_int 23)])))]
908 if (BYTES_BIG_ENDIAN)
909 return "vmrghb %0,%1,%2";
911 return "vmrglb %0,%2,%1";
913 [(set_attr "type" "vecperm")])
;; Raw vmrghb with no endian adjustment, kept opaque as an unspec.
915 (define_insn "altivec_vmrghb_direct"
916 [(set (match_operand:V16QI 0 "register_operand" "=v")
917 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
918 (match_operand:V16QI 2 "register_operand" "v")]
919 UNSPEC_VMRGH_DIRECT))]
922 [(set_attr "type" "vecperm")])
;; Merge-high halfword: same endian-correct VEC_SELECT scheme as
;; altivec_vmrghb, over the 16-halfword concatenation of the inputs.
924 (define_expand "altivec_vmrghh"
925 [(use (match_operand:V8HI 0 "register_operand" ""))
926 (use (match_operand:V8HI 1 "register_operand" ""))
927 (use (match_operand:V8HI 2 "register_operand" ""))]
933 /* Special handling for LE with -maltivec=be. */
934 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
936 v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
937 GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
938 x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
942 v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
943 GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
944 x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
947 x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
948 emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
;; Recognizer; on LE implemented by vmrglh with swapped operands.
952 (define_insn "*altivec_vmrghh_internal"
953 [(set (match_operand:V8HI 0 "register_operand" "=v")
956 (match_operand:V8HI 1 "register_operand" "v")
957 (match_operand:V8HI 2 "register_operand" "v"))
958 (parallel [(const_int 0) (const_int 8)
959 (const_int 1) (const_int 9)
960 (const_int 2) (const_int 10)
961 (const_int 3) (const_int 11)])))]
964 if (BYTES_BIG_ENDIAN)
965 return "vmrghh %0,%1,%2";
967 return "vmrglh %0,%2,%1";
969 [(set_attr "type" "vecperm")])
;; Raw vmrghh with no endian adjustment.
971 (define_insn "altivec_vmrghh_direct"
972 [(set (match_operand:V8HI 0 "register_operand" "=v")
973 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
974 (match_operand:V8HI 2 "register_operand" "v")]
975 UNSPEC_VMRGH_DIRECT))]
978 [(set_attr "type" "vecperm")])
;; Merge-high word: endian-correct VEC_SELECT over the 8-word
;; concatenation of the two inputs, as in the byte/halfword versions.
980 (define_expand "altivec_vmrghw"
981 [(use (match_operand:V4SI 0 "register_operand" ""))
982 (use (match_operand:V4SI 1 "register_operand" ""))
983 (use (match_operand:V4SI 2 "register_operand" ""))]
984 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
989 /* Special handling for LE with -maltivec=be. */
990 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
992 v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
993 x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
997 v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
998 x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1001 x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1002 emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
;; Recognizer; on LE implemented by vmrglw with swapped operands.
1006 (define_insn "*altivec_vmrghw_internal"
1007 [(set (match_operand:V4SI 0 "register_operand" "=v")
1010 (match_operand:V4SI 1 "register_operand" "v")
1011 (match_operand:V4SI 2 "register_operand" "v"))
1012 (parallel [(const_int 0) (const_int 4)
1013 (const_int 1) (const_int 5)])))]
1014 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1016 if (BYTES_BIG_ENDIAN)
1017 return "vmrghw %0,%1,%2";
1019 return "vmrglw %0,%2,%1";
1021 [(set_attr "type" "vecperm")])
;; Raw vmrghw with no endian adjustment.
1023 (define_insn "altivec_vmrghw_direct"
1024 [(set (match_operand:V4SI 0 "register_operand" "=v")
1025 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1026 (match_operand:V4SI 2 "register_operand" "v")]
1027 UNSPEC_VMRGH_DIRECT))]
1030 [(set_attr "type" "vecperm")])
;; Merge-high for V4SF: same word-merge instructions operate on the
;; float bit patterns.
1032 (define_insn "*altivec_vmrghsf"
1033 [(set (match_operand:V4SF 0 "register_operand" "=v")
1036 (match_operand:V4SF 1 "register_operand" "v")
1037 (match_operand:V4SF 2 "register_operand" "v"))
1038 (parallel [(const_int 0) (const_int 4)
1039 (const_int 1) (const_int 5)])))]
1040 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1042 if (BYTES_BIG_ENDIAN)
1043 return "vmrghw %0,%1,%2";
1045 return "vmrglw %0,%2,%1";
1047 [(set_attr "type" "vecperm")])
;; Merge-low byte: mirror image of altivec_vmrghb -- the index sets for
;; the BE and the LE-with--maltivec=be cases are exactly exchanged.
1049 (define_expand "altivec_vmrglb"
1050 [(use (match_operand:V16QI 0 "register_operand" ""))
1051 (use (match_operand:V16QI 1 "register_operand" ""))
1052 (use (match_operand:V16QI 2 "register_operand" ""))]
1058 /* Special handling for LE with -maltivec=be. */
1059 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1061 v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
1062 GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
1063 GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
1064 GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
1065 x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
1069 v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
1070 GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
1071 GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
1072 GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
1073 x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
1076 x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1077 emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
;; Recognizer; on LE implemented by vmrghb with swapped operands.
1081 (define_insn "*altivec_vmrglb_internal"
1082 [(set (match_operand:V16QI 0 "register_operand" "=v")
1085 (match_operand:V16QI 1 "register_operand" "v")
1086 (match_operand:V16QI 2 "register_operand" "v"))
1087 (parallel [(const_int 8) (const_int 24)
1088 (const_int 9) (const_int 25)
1089 (const_int 10) (const_int 26)
1090 (const_int 11) (const_int 27)
1091 (const_int 12) (const_int 28)
1092 (const_int 13) (const_int 29)
1093 (const_int 14) (const_int 30)
1094 (const_int 15) (const_int 31)])))]
1097 if (BYTES_BIG_ENDIAN)
1098 return "vmrglb %0,%1,%2";
1100 return "vmrghb %0,%2,%1";
1102 [(set_attr "type" "vecperm")])
;; Raw vmrglb with no endian adjustment.
1104 (define_insn "altivec_vmrglb_direct"
1105 [(set (match_operand:V16QI 0 "register_operand" "=v")
1106 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1107 (match_operand:V16QI 2 "register_operand" "v")]
1108 UNSPEC_VMRGL_DIRECT))]
1111 [(set_attr "type" "vecperm")])
;; Merge-low halfword: mirror image of altivec_vmrghh.
1113 (define_expand "altivec_vmrglh"
1114 [(use (match_operand:V8HI 0 "register_operand" ""))
1115 (use (match_operand:V8HI 1 "register_operand" ""))
1116 (use (match_operand:V8HI 2 "register_operand" ""))]
1122 /* Special handling for LE with -maltivec=be. */
1123 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1125 v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
1126 GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
1127 x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
1131 v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
1132 GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
1133 x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1136 x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1137 emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
;; Recognizer; on LE implemented by vmrghh with swapped operands.
1141 (define_insn "*altivec_vmrglh_internal"
1142 [(set (match_operand:V8HI 0 "register_operand" "=v")
1145 (match_operand:V8HI 1 "register_operand" "v")
1146 (match_operand:V8HI 2 "register_operand" "v"))
1147 (parallel [(const_int 4) (const_int 12)
1148 (const_int 5) (const_int 13)
1149 (const_int 6) (const_int 14)
1150 (const_int 7) (const_int 15)])))]
1153 if (BYTES_BIG_ENDIAN)
1154 return "vmrglh %0,%1,%2";
1156 return "vmrghh %0,%2,%1";
1158 [(set_attr "type" "vecperm")])
;; Raw vmrglh with no endian adjustment.
1160 (define_insn "altivec_vmrglh_direct"
1161 [(set (match_operand:V8HI 0 "register_operand" "=v")
1162 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1163 (match_operand:V8HI 2 "register_operand" "v")]
1164 UNSPEC_VMRGL_DIRECT))]
1167 [(set_attr "type" "vecperm")])
;; Vector merge low, word elements (V4SI).  Same scheme as vmrglh above:
;; LE with -maltivec=be swaps the concat order and selects merge-high
;; indices; otherwise the plain merge-low selection is built.
1169 (define_expand "altivec_vmrglw"
1170 [(use (match_operand:V4SI 0 "register_operand" ""))
1171 (use (match_operand:V4SI 1 "register_operand" ""))
1172 (use (match_operand:V4SI 2 "register_operand" ""))]
1173 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1178 /* Special handling for LE with -maltivec=be. */
1179 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1181 v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1182 x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
1186 v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1187 x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1190 x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1191 emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
;; Matcher: vmrglw on BE, vmrghw with swapped operands on LE.
1195 (define_insn "*altivec_vmrglw_internal"
1196 [(set (match_operand:V4SI 0 "register_operand" "=v")
1199 (match_operand:V4SI 1 "register_operand" "v")
1200 (match_operand:V4SI 2 "register_operand" "v"))
1201 (parallel [(const_int 2) (const_int 6)
1202 (const_int 3) (const_int 7)])))]
1203 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1205 if (BYTES_BIG_ENDIAN)
1206 return "vmrglw %0,%1,%2";
1208 return "vmrghw %0,%2,%1";
1210 [(set_attr "type" "vecperm")])
;; Unspec form with no endianness adjustment (raw vmrglw semantics).
1212 (define_insn "altivec_vmrglw_direct"
1213 [(set (match_operand:V4SI 0 "register_operand" "=v")
1214 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1215 (match_operand:V4SI 2 "register_operand" "v")]
1216 UNSPEC_VMRGL_DIRECT))]
1219 [(set_attr "type" "vecperm")])
;; Merge low for V4SF: floats share the word merge instructions
;; (vmrglw/vmrghw), with the usual operand swap on little-endian.
1221 (define_insn "*altivec_vmrglsf"
1222 [(set (match_operand:V4SF 0 "register_operand" "=v")
1225 (match_operand:V4SF 1 "register_operand" "v")
1226 (match_operand:V4SF 2 "register_operand" "v"))
1227 (parallel [(const_int 2) (const_int 6)
1228 (const_int 3) (const_int 7)])))]
1229 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1231 if (BYTES_BIG_ENDIAN)
1232 return "vmrglw %0,%1,%2";
1234 return "vmrghw %0,%2,%1";
1236 [(set_attr "type" "vecperm")])
1238 ;; Power8 vector merge even/odd
;; Merge even words: selects BE-numbered elements 0,4,2,6; on LE the
;; odd-merge instruction with swapped operands gives the same result.
1239 (define_insn "p8_vmrgew"
1240 [(set (match_operand:V4SI 0 "register_operand" "=v")
1243 (match_operand:V4SI 1 "register_operand" "v")
1244 (match_operand:V4SI 2 "register_operand" "v"))
1245 (parallel [(const_int 0) (const_int 4)
1246 (const_int 2) (const_int 6)])))]
1249 if (BYTES_BIG_ENDIAN)
1250 return "vmrgew %0,%1,%2";
1252 return "vmrgow %0,%2,%1";
1254 [(set_attr "type" "vecperm")])
;; Merge odd words: BE-numbered elements 1,5,3,7; dual of p8_vmrgew.
1256 (define_insn "p8_vmrgow"
1257 [(set (match_operand:V4SI 0 "register_operand" "=v")
1260 (match_operand:V4SI 1 "register_operand" "v")
1261 (match_operand:V4SI 2 "register_operand" "v"))
1262 (parallel [(const_int 1) (const_int 5)
1263 (const_int 3) (const_int 7)])))]
1266 if (BYTES_BIG_ENDIAN)
1267 return "vmrgow %0,%1,%2";
1269 return "vmrgew %0,%2,%1";
1271 [(set_attr "type" "vecperm")])
;; Widening multiply even/odd expanders.  "Even"/"odd" here refer to
;; element positions in the vectorizer's (true) element order, so when
;; the element order is big-endian the "even" expander emits the vmule*
;; instruction, and for true little-endian order the roles of the
;; even (vmule*) and odd (vmulo*) instructions are swapped.
1273 (define_expand "vec_widen_umult_even_v16qi"
1274 [(use (match_operand:V8HI 0 "register_operand" ""))
1275 (use (match_operand:V16QI 1 "register_operand" ""))
1276 (use (match_operand:V16QI 2 "register_operand" ""))]
1279 if (VECTOR_ELT_ORDER_BIG)
1280 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1282 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1286 (define_expand "vec_widen_smult_even_v16qi"
1287 [(use (match_operand:V8HI 0 "register_operand" ""))
1288 (use (match_operand:V16QI 1 "register_operand" ""))
1289 (use (match_operand:V16QI 2 "register_operand" ""))]
1292 if (VECTOR_ELT_ORDER_BIG)
1293 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1295 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1299 (define_expand "vec_widen_umult_even_v8hi"
1300 [(use (match_operand:V4SI 0 "register_operand" ""))
1301 (use (match_operand:V8HI 1 "register_operand" ""))
1302 (use (match_operand:V8HI 2 "register_operand" ""))]
1305 if (VECTOR_ELT_ORDER_BIG)
1306 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1308 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1312 (define_expand "vec_widen_smult_even_v8hi"
1313 [(use (match_operand:V4SI 0 "register_operand" ""))
1314 (use (match_operand:V8HI 1 "register_operand" ""))
1315 (use (match_operand:V8HI 2 "register_operand" ""))]
1318 if (VECTOR_ELT_ORDER_BIG)
1319 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1321 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
;; The "odd" expanders mirror the "even" ones with the instruction
;; choices reversed.
1325 (define_expand "vec_widen_umult_odd_v16qi"
1326 [(use (match_operand:V8HI 0 "register_operand" ""))
1327 (use (match_operand:V16QI 1 "register_operand" ""))
1328 (use (match_operand:V16QI 2 "register_operand" ""))]
1331 if (VECTOR_ELT_ORDER_BIG)
1332 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1334 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1338 (define_expand "vec_widen_smult_odd_v16qi"
1339 [(use (match_operand:V8HI 0 "register_operand" ""))
1340 (use (match_operand:V16QI 1 "register_operand" ""))
1341 (use (match_operand:V16QI 2 "register_operand" ""))]
1344 if (VECTOR_ELT_ORDER_BIG)
1345 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1347 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1351 (define_expand "vec_widen_umult_odd_v8hi"
1352 [(use (match_operand:V4SI 0 "register_operand" ""))
1353 (use (match_operand:V8HI 1 "register_operand" ""))
1354 (use (match_operand:V8HI 2 "register_operand" ""))]
1357 if (VECTOR_ELT_ORDER_BIG)
1358 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1360 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1364 (define_expand "vec_widen_smult_odd_v8hi"
1365 [(use (match_operand:V4SI 0 "register_operand" ""))
1366 (use (match_operand:V8HI 1 "register_operand" ""))
1367 (use (match_operand:V8HI 2 "register_operand" ""))]
1370 if (VECTOR_ELT_ORDER_BIG)
1371 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1373 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
;; Widening multiply instructions, modeled as unspecs: byte inputs
;; produce halfword results (V16QI -> V8HI), halfword inputs produce
;; word results (V8HI -> V4SI).  Unsigned (u) and signed (s) variants,
;; even (e) and odd (o) element selections.
;; NOTE(review): each pattern's unspec name, condition, and output
;; template lines are elided from this chunk.
1377 (define_insn "altivec_vmuleub"
1378 [(set (match_operand:V8HI 0 "register_operand" "=v")
1379 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1380 (match_operand:V16QI 2 "register_operand" "v")]
1384 [(set_attr "type" "veccomplex")])
1386 (define_insn "altivec_vmuloub"
1387 [(set (match_operand:V8HI 0 "register_operand" "=v")
1388 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1389 (match_operand:V16QI 2 "register_operand" "v")]
1393 [(set_attr "type" "veccomplex")])
1395 (define_insn "altivec_vmulesb"
1396 [(set (match_operand:V8HI 0 "register_operand" "=v")
1397 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1398 (match_operand:V16QI 2 "register_operand" "v")]
1402 [(set_attr "type" "veccomplex")])
1404 (define_insn "altivec_vmulosb"
1405 [(set (match_operand:V8HI 0 "register_operand" "=v")
1406 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1407 (match_operand:V16QI 2 "register_operand" "v")]
1411 [(set_attr "type" "veccomplex")])
1413 (define_insn "altivec_vmuleuh"
1414 [(set (match_operand:V4SI 0 "register_operand" "=v")
1415 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1416 (match_operand:V8HI 2 "register_operand" "v")]
1420 [(set_attr "type" "veccomplex")])
1422 (define_insn "altivec_vmulouh"
1423 [(set (match_operand:V4SI 0 "register_operand" "=v")
1424 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1425 (match_operand:V8HI 2 "register_operand" "v")]
1429 [(set_attr "type" "veccomplex")])
1431 (define_insn "altivec_vmulesh"
1432 [(set (match_operand:V4SI 0 "register_operand" "=v")
1433 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1434 (match_operand:V8HI 2 "register_operand" "v")]
1438 [(set_attr "type" "veccomplex")])
1440 (define_insn "altivec_vmulosh"
1441 [(set (match_operand:V4SI 0 "register_operand" "=v")
1442 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1443 (match_operand:V8HI 2 "register_operand" "v")]
1447 [(set_attr "type" "veccomplex")])
1450 ;; Vector pack/unpack
;; Pack pixel: for little-endian (true) element order the two source
;; operands are swapped so the packed halves land in the right order.
1451 (define_insn "altivec_vpkpx"
1452 [(set (match_operand:V8HI 0 "register_operand" "=v")
1453 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
1454 (match_operand:V4SI 2 "register_operand" "v")]
1459 if (VECTOR_ELT_ORDER_BIG)
1460 return \"vpkpx %0,%1,%2\";
1462 return \"vpkpx %0,%2,%1\";
1464 [(set_attr "type" "vecperm")])
;; Pack signed -> signed with saturation (VP iterator covers the
;; element widths; <VP_small> is the narrower result mode).
1466 (define_insn "altivec_vpks<VI_char>ss"
1467 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1468 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1469 (match_operand:VP 2 "register_operand" "v")]
1470 UNSPEC_VPACK_SIGN_SIGN_SAT))]
1474 if (VECTOR_ELT_ORDER_BIG)
1475 return \"vpks<VI_char>ss %0,%1,%2\";
1477 return \"vpks<VI_char>ss %0,%2,%1\";
1479 [(set_attr "type" "vecperm")])
;; Pack signed -> unsigned with saturation.
1481 (define_insn "altivec_vpks<VI_char>us"
1482 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1483 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1484 (match_operand:VP 2 "register_operand" "v")]
1485 UNSPEC_VPACK_SIGN_UNS_SAT))]
1489 if (VECTOR_ELT_ORDER_BIG)
1490 return \"vpks<VI_char>us %0,%1,%2\";
1492 return \"vpks<VI_char>us %0,%2,%1\";
1494 [(set_attr "type" "vecperm")])
;; Pack unsigned -> unsigned with saturation.
1496 (define_insn "altivec_vpku<VI_char>us"
1497 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1498 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1499 (match_operand:VP 2 "register_operand" "v")]
1500 UNSPEC_VPACK_UNS_UNS_SAT))]
1504 if (VECTOR_ELT_ORDER_BIG)
1505 return \"vpku<VI_char>us %0,%1,%2\";
1507 return \"vpku<VI_char>us %0,%2,%1\";
1509 [(set_attr "type" "vecperm")])
;; Pack unsigned -> unsigned, modulo (truncating).
1511 (define_insn "altivec_vpku<VI_char>um"
1512 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1513 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1514 (match_operand:VP 2 "register_operand" "v")]
1515 UNSPEC_VPACK_UNS_UNS_MOD))]
1519 if (VECTOR_ELT_ORDER_BIG)
1520 return \"vpku<VI_char>um %0,%1,%2\";
1522 return \"vpku<VI_char>um %0,%2,%1\";
1524 [(set_attr "type" "vecperm")])
;; "Direct" modulo pack: keys the operand swap on BYTES_BIG_ENDIAN
;; (true endianness) rather than VECTOR_ELT_ORDER_BIG, i.e. it ignores
;; -maltivec=be element-order overrides.
1526 (define_insn "altivec_vpku<VI_char>um_direct"
1527 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1528 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1529 (match_operand:VP 2 "register_operand" "v")]
1530 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT))]
1534 if (BYTES_BIG_ENDIAN)
1535 return \"vpku<VI_char>um %0,%1,%2\";
1537 return \"vpku<VI_char>um %0,%2,%1\";
1539 [(set_attr "type" "vecperm")])
;; Element-wise rotate left; each element of operand 2 gives the rotate
;; count for the corresponding element of operand 1.
1541 (define_insn "*altivec_vrl<VI_char>"
1542 [(set (match_operand:VI2 0 "register_operand" "=v")
1543 (rotate:VI2 (match_operand:VI2 1 "register_operand" "v")
1544 (match_operand:VI2 2 "register_operand" "v")))]
1546 "vrl<VI_char> %0,%1,%2"
1547 [(set_attr "type" "vecsimple")])
;; Whole-register shifts modeled as unspecs (vsl = shift left by bits,
;; vslo = shift left by octets); templates elided from this chunk.
1549 (define_insn "altivec_vsl"
1550 [(set (match_operand:V4SI 0 "register_operand" "=v")
1551 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1552 (match_operand:V4SI 2 "register_operand" "v")]
1556 [(set_attr "type" "vecperm")])
1558 (define_insn "altivec_vslo"
1559 [(set (match_operand:V4SI 0 "register_operand" "=v")
1560 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1561 (match_operand:V4SI 2 "register_operand" "v")]
1565 [(set_attr "type" "vecperm")])
;; Element-wise logical shift left.
1567 (define_insn "*altivec_vsl<VI_char>"
1568 [(set (match_operand:VI2 0 "register_operand" "=v")
1569 (ashift:VI2 (match_operand:VI2 1 "register_operand" "v")
1570 (match_operand:VI2 2 "register_operand" "v")))]
1572 "vsl<VI_char> %0,%1,%2"
1573 [(set_attr "type" "vecsimple")])
;; Element-wise logical shift right.
1575 (define_insn "*altivec_vsr<VI_char>"
1576 [(set (match_operand:VI2 0 "register_operand" "=v")
1577 (lshiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1578 (match_operand:VI2 2 "register_operand" "v")))]
1580 "vsr<VI_char> %0,%1,%2"
1581 [(set_attr "type" "vecsimple")])
;; Element-wise arithmetic shift right.
1583 (define_insn "*altivec_vsra<VI_char>"
1584 [(set (match_operand:VI2 0 "register_operand" "=v")
1585 (ashiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1586 (match_operand:VI2 2 "register_operand" "v")))]
1588 "vsra<VI_char> %0,%1,%2"
1589 [(set_attr "type" "vecsimple")])
;; Whole-register right shifts (vsr = by bits, vsro = by octets),
;; unspec forms; templates elided from this chunk.
1591 (define_insn "altivec_vsr"
1592 [(set (match_operand:V4SI 0 "register_operand" "=v")
1593 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1594 (match_operand:V4SI 2 "register_operand" "v")]
1598 [(set_attr "type" "vecperm")])
1600 (define_insn "altivec_vsro"
1601 [(set (match_operand:V4SI 0 "register_operand" "=v")
1602 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1603 (match_operand:V4SI 2 "register_operand" "v")]
1607 [(set_attr "type" "vecperm")])
;; Partial sums across quarters of the vector.  Hard register 110 is the
;; VSCR (vector status and control register), written via
;; UNSPEC_SET_VSCR because these saturating sums can set SAT.
1609 (define_insn "altivec_vsum4ubs"
1610 [(set (match_operand:V4SI 0 "register_operand" "=v")
1611 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")
1612 (match_operand:V4SI 2 "register_operand" "v")]
1614 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1617 [(set_attr "type" "veccomplex")])
;; Signed variant over the VIshort modes (V8HI/V16QI).
1619 (define_insn "altivec_vsum4s<VI_char>s"
1620 [(set (match_operand:V4SI 0 "register_operand" "=v")
1621 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1622 (match_operand:V4SI 2 "register_operand" "v")]
1624 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1626 "vsum4s<VI_char>s %0,%1,%2"
1627 [(set_attr "type" "veccomplex")])
1629 ;; FIXME: For the following two patterns, the scratch should only be
1630 ;; allocated for !VECTOR_ELT_ORDER_BIG, and the instructions should
1631 ;; be emitted separately.
;; Sum across half-vectors.  For non-BE element order this is a
;; three-instruction sequence (rotate inputs into BE positions with
;; vsldoi, sum, rotate the result back), hence the conditional length
;; attribute of 12 bytes below; reg 110 models the VSCR SAT update.
1632 (define_insn "altivec_vsum2sws"
1633 [(set (match_operand:V4SI 0 "register_operand" "=v")
1634 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1635 (match_operand:V4SI 2 "register_operand" "v")]
1637 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))
1638 (clobber (match_scratch:V4SI 3 "=v"))]
1641 if (VECTOR_ELT_ORDER_BIG)
1642 return "vsum2sws %0,%1,%2";
1644 return "vsldoi %3,%2,%2,12\n\tvsum2sws %3,%1,%3\n\tvsldoi %0,%3,%3,4";
1646 [(set_attr "type" "veccomplex")
1647 (set (attr "length")
1649 (match_test "VECTOR_ELT_ORDER_BIG")
1651 (const_string "12")))])
;; Full sum across the vector; same splat/sum/rotate workaround and
;; conditional length for non-BE element order.
1653 (define_insn "altivec_vsumsws"
1654 [(set (match_operand:V4SI 0 "register_operand" "=v")
1655 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1656 (match_operand:V4SI 2 "register_operand" "v")]
1658 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))
1659 (clobber (match_scratch:V4SI 3 "=v"))]
1662 if (VECTOR_ELT_ORDER_BIG)
1663 return "vsumsws %0,%1,%2";
1665 return "vspltw %3,%2,0\n\tvsumsws %3,%1,%3\n\tvsldoi %0,%3,%3,12";
1667 [(set_attr "type" "veccomplex")
1668 (set (attr "length")
1670 (match_test "(VECTOR_ELT_ORDER_BIG)")
1672 (const_string "12")))])
;; Raw single-instruction form with no element-order adjustment;
;; template elided from this chunk.
1674 (define_insn "altivec_vsumsws_direct"
1675 [(set (match_operand:V4SI 0 "register_operand" "=v")
1676 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1677 (match_operand:V4SI 2 "register_operand" "v")]
1678 UNSPEC_VSUMSWS_DIRECT))
1679 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1682 [(set_attr "type" "veccomplex")])
;; Splat byte: broadcast the element at immediate index operand[2] to
;; all 16 byte lanes.  The expander pre-adjusts the index for LE with
;; -maltivec=be; the insn below re-adjusts for true LE, so the two
;; adjustments cancel in the -maltivec=be case.
1684 (define_expand "altivec_vspltb"
1685 [(use (match_operand:V16QI 0 "register_operand" ""))
1686 (use (match_operand:V16QI 1 "register_operand" ""))
1687 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1693 /* Special handling for LE with -maltivec=be. We have to reflect
1694 the actual selected index for the splat in the RTL. */
1695 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1696 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
1698 v = gen_rtvec (1, operands[2]);
1699 x = gen_rtx_VEC_SELECT (QImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1700 x = gen_rtx_VEC_DUPLICATE (V16QImode, x);
1701 emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
1705 (define_insn "*altivec_vspltb_internal"
1706 [(set (match_operand:V16QI 0 "register_operand" "=v")
1707 (vec_duplicate:V16QI
1708 (vec_select:QI (match_operand:V16QI 1 "register_operand" "v")
1710 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
1713 /* For true LE, this adjusts the selected index. For LE with
1714 -maltivec=be, this reverses what was done in the define_expand
1715 because the instruction already has big-endian bias. */
1716 if (!BYTES_BIG_ENDIAN)
1717 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
1719 return "vspltb %0,%1,%2";
1721 [(set_attr "type" "vecperm")])
;; Unspec form: the given index is used as-is (no endian adjustment).
1723 (define_insn "altivec_vspltb_direct"
1724 [(set (match_operand:V16QI 0 "register_operand" "=v")
1725 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1726 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1727 UNSPEC_VSPLT_DIRECT))]
1730 [(set_attr "type" "vecperm")])
;; Splat halfword: same expand/insn/direct scheme as vspltb, with the
;; index mirrored as (7 - index) for the 8 halfword lanes.
1732 (define_expand "altivec_vsplth"
1733 [(use (match_operand:V8HI 0 "register_operand" ""))
1734 (use (match_operand:V8HI 1 "register_operand" ""))
1735 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1741 /* Special handling for LE with -maltivec=be. We have to reflect
1742 the actual selected index for the splat in the RTL. */
1743 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1744 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
1746 v = gen_rtvec (1, operands[2]);
1747 x = gen_rtx_VEC_SELECT (HImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1748 x = gen_rtx_VEC_DUPLICATE (V8HImode, x);
1749 emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
1753 (define_insn "*altivec_vsplth_internal"
1754 [(set (match_operand:V8HI 0 "register_operand" "=v")
1756 (vec_select:HI (match_operand:V8HI 1 "register_operand" "v")
1758 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
1761 /* For true LE, this adjusts the selected index. For LE with
1762 -maltivec=be, this reverses what was done in the define_expand
1763 because the instruction already has big-endian bias. */
1764 if (!BYTES_BIG_ENDIAN)
1765 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
1767 return "vsplth %0,%1,%2";
1769 [(set_attr "type" "vecperm")])
;; Unspec form: index used as-is.
1771 (define_insn "altivec_vsplth_direct"
1772 [(set (match_operand:V8HI 0 "register_operand" "=v")
1773 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1774 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1775 UNSPEC_VSPLT_DIRECT))]
1778 [(set_attr "type" "vecperm")])
;; Splat word: same scheme as vspltb/vsplth, index mirrored as
;; (3 - index) for the 4 word lanes.
1780 (define_expand "altivec_vspltw"
1781 [(use (match_operand:V4SI 0 "register_operand" ""))
1782 (use (match_operand:V4SI 1 "register_operand" ""))
1783 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1789 /* Special handling for LE with -maltivec=be. We have to reflect
1790 the actual selected index for the splat in the RTL. */
1791 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1792 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1794 v = gen_rtvec (1, operands[2]);
1795 x = gen_rtx_VEC_SELECT (SImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1796 x = gen_rtx_VEC_DUPLICATE (V4SImode, x);
1797 emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
1801 (define_insn "*altivec_vspltw_internal"
1802 [(set (match_operand:V4SI 0 "register_operand" "=v")
1804 (vec_select:SI (match_operand:V4SI 1 "register_operand" "v")
1806 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
1809 /* For true LE, this adjusts the selected index. For LE with
1810 -maltivec=be, this reverses what was done in the define_expand
1811 because the instruction already has big-endian bias. */
1812 if (!BYTES_BIG_ENDIAN)
1813 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1815 return "vspltw %0,%1,%2";
1817 [(set_attr "type" "vecperm")])
;; Unspec form: index used as-is.
1819 (define_insn "altivec_vspltw_direct"
1820 [(set (match_operand:V4SI 0 "register_operand" "=v")
1821 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1822 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1823 UNSPEC_VSPLT_DIRECT))]
1826 [(set_attr "type" "vecperm")])
;; Splat float: V4SF shares the integer word-splat instruction
;; (the internal pattern emits vspltw); index mirrored as (3 - index).
1828 (define_expand "altivec_vspltsf"
1829 [(use (match_operand:V4SF 0 "register_operand" ""))
1830 (use (match_operand:V4SF 1 "register_operand" ""))
1831 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1837 /* Special handling for LE with -maltivec=be. We have to reflect
1838 the actual selected index for the splat in the RTL. */
1839 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1840 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1842 v = gen_rtvec (1, operands[2]);
1843 x = gen_rtx_VEC_SELECT (SFmode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1844 x = gen_rtx_VEC_DUPLICATE (V4SFmode, x);
1845 emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
1849 (define_insn "*altivec_vspltsf_internal"
1850 [(set (match_operand:V4SF 0 "register_operand" "=v")
1852 (vec_select:SF (match_operand:V4SF 1 "register_operand" "v")
1854 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
1855 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1857 /* For true LE, this adjusts the selected index. For LE with
1858 -maltivec=be, this reverses what was done in the define_expand
1859 because the instruction already has big-endian bias. */
1860 if (!BYTES_BIG_ENDIAN)
1861 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
1863 return "vspltw %0,%1,%2";
1865 [(set_attr "type" "vecperm")])
;; Splat immediate: broadcast a signed 5-bit constant to every element
;; (b/h/w variants via the VI iterator's <VI_char>).
1867 (define_insn "altivec_vspltis<VI_char>"
1868 [(set (match_operand:VI 0 "register_operand" "=v")
1870 (match_operand:QI 1 "s5bit_cint_operand" "i")))]
1872 "vspltis<VI_char> %0,%1"
1873 [(set_attr "type" "vecperm")])
;; Round toward zero, modeled with RTL "fix" on V4SF.
;; NOTE(review): output template elided -- presumably vrfiz per the
;; pattern name; confirm in the full file.
1875 (define_insn "*altivec_vrfiz"
1876 [(set (match_operand:V4SF 0 "register_operand" "=v")
1877 (fix:V4SF (match_operand:V4SF 1 "register_operand" "v")))]
1878 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1880 [(set_attr "type" "vecfloat")])
;; Generic permute: operand 3 is a V16QI selector vector.  The expanders
;; divert to altivec_expand_vec_perm_le when the element order is not
;; big-endian, which rewrites the operands/selector for LE semantics.
1882 (define_expand "altivec_vperm_<mode>"
1883 [(set (match_operand:VM 0 "register_operand" "=v")
1884 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
1885 (match_operand:VM 2 "register_operand" "v")
1886 (match_operand:V16QI 3 "register_operand" "v")]
1890 if (!VECTOR_ELT_ORDER_BIG)
1892 altivec_expand_vec_perm_le (operands);
1897 (define_insn "*altivec_vperm_<mode>_internal"
1898 [(set (match_operand:VM 0 "register_operand" "=v")
1899 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
1900 (match_operand:VM 2 "register_operand" "v")
1901 (match_operand:V16QI 3 "register_operand" "v")]
1905 [(set_attr "type" "vecperm")])
;; Unsigned-element flavor; structurally identical to the above.
1907 (define_expand "altivec_vperm_<mode>_uns"
1908 [(set (match_operand:VM 0 "register_operand" "=v")
1909 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
1910 (match_operand:VM 2 "register_operand" "v")
1911 (match_operand:V16QI 3 "register_operand" "v")]
1915 if (!VECTOR_ELT_ORDER_BIG)
1917 altivec_expand_vec_perm_le (operands);
1922 (define_insn "*altivec_vperm_<mode>_uns_internal"
1923 [(set (match_operand:VM 0 "register_operand" "=v")
1924 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
1925 (match_operand:VM 2 "register_operand" "v")
1926 (match_operand:V16QI 3 "register_operand" "v")]
1930 [(set_attr "type" "vecperm")])
;; Standard-name expander used by the middle end; keys on true
;; endianness (BYTES_BIG_ENDIAN) rather than element order.
1932 (define_expand "vec_permv16qi"
1933 [(set (match_operand:V16QI 0 "register_operand" "")
1934 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "")
1935 (match_operand:V16QI 2 "register_operand" "")
1936 (match_operand:V16QI 3 "register_operand" "")]
1940 if (!BYTES_BIG_ENDIAN) {
1941 altivec_expand_vec_perm_le (operands);
;; Constant-selector permute: operand 3 is a compile-time constant;
;; altivec_expand_vec_perm_const tries to match a cheaper instruction.
1946 (define_expand "vec_perm_constv16qi"
1947 [(match_operand:V16QI 0 "register_operand" "")
1948 (match_operand:V16QI 1 "register_operand" "")
1949 (match_operand:V16QI 2 "register_operand" "")
1950 (match_operand:V16QI 3 "" "")]
1953 if (altivec_expand_vec_perm_const (operands))
;; Round-to-integer family (results stay in float format); the inline
;; comments record the rounding direction of each variant.
1959 (define_insn "altivec_vrfip" ; ceil
1960 [(set (match_operand:V4SF 0 "register_operand" "=v")
1961 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
1965 [(set_attr "type" "vecfloat")])
;; Round to nearest.
1967 (define_insn "altivec_vrfin"
1968 [(set (match_operand:V4SF 0 "register_operand" "=v")
1969 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
1973 [(set_attr "type" "vecfloat")])
1975 (define_insn "*altivec_vrfim" ; floor
1976 [(set (match_operand:V4SF 0 "register_operand" "=v")
1977 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
1981 [(set_attr "type" "vecfloat")])
;; Int<->float conversions with a scale immediate (operand 2).
;; vcfux/vcfsx: unsigned/signed int words -> float; vctuxs/vctsxs:
;; float -> saturating unsigned/signed int words.  The vct* patterns
;; write reg 110 (VSCR) via UNSPEC_SET_VSCR for the SAT bit.
1983 (define_insn "altivec_vcfux"
1984 [(set (match_operand:V4SF 0 "register_operand" "=v")
1985 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
1986 (match_operand:QI 2 "immediate_operand" "i")]
1990 [(set_attr "type" "vecfloat")])
1992 (define_insn "altivec_vcfsx"
1993 [(set (match_operand:V4SF 0 "register_operand" "=v")
1994 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
1995 (match_operand:QI 2 "immediate_operand" "i")]
1999 [(set_attr "type" "vecfloat")])
2001 (define_insn "altivec_vctuxs"
2002 [(set (match_operand:V4SI 0 "register_operand" "=v")
2003 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2004 (match_operand:QI 2 "immediate_operand" "i")]
2006 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2009 [(set_attr "type" "vecfloat")])
2011 (define_insn "altivec_vctsxs"
2012 [(set (match_operand:V4SI 0 "register_operand" "=v")
2013 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2014 (match_operand:QI 2 "immediate_operand" "i")]
2016 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2019 [(set_attr "type" "vecfloat")])
;; Floating-point estimate instructions (per the mnemonics: log2
;; estimate, 2^x estimate, reciprocal-sqrt estimate, reciprocal
;; estimate); all unary V4SF unspecs.
2021 (define_insn "altivec_vlogefp"
2022 [(set (match_operand:V4SF 0 "register_operand" "=v")
2023 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2027 [(set_attr "type" "vecfloat")])
2029 (define_insn "altivec_vexptefp"
2030 [(set (match_operand:V4SF 0 "register_operand" "=v")
2031 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2035 [(set_attr "type" "vecfloat")])
2037 (define_insn "*altivec_vrsqrtefp"
2038 [(set (match_operand:V4SF 0 "register_operand" "=v")
2039 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2041 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2043 [(set_attr "type" "vecfloat")])
2045 (define_insn "altivec_vrefp"
2046 [(set (match_operand:V4SF 0 "register_operand" "=v")
2047 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2049 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2051 [(set_attr "type" "vecfloat")])
;; copysign for V4SF: build a mask with only the sign bit (1 << 31) set
;; in each word, then use a vector select so each result lane takes the
;; sign bit from operand 2 and the remaining bits from operand 1.
2053 (define_expand "altivec_copysign_v4sf3"
2054 [(use (match_operand:V4SF 0 "register_operand" ""))
2055 (use (match_operand:V4SF 1 "register_operand" ""))
2056 (use (match_operand:V4SF 2 "register_operand" ""))]
2057 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2060 rtx mask = gen_reg_rtx (V4SImode);
2061 rtvec v = rtvec_alloc (4);
2062 unsigned HOST_WIDE_INT mask_val = ((unsigned HOST_WIDE_INT)1) << 31;
2064 RTVEC_ELT (v, 0) = GEN_INT (mask_val);
2065 RTVEC_ELT (v, 1) = GEN_INT (mask_val);
2066 RTVEC_ELT (v, 2) = GEN_INT (mask_val);
2067 RTVEC_ELT (v, 3) = GEN_INT (mask_val);
2069 emit_insn (gen_vec_initv4si (mask, gen_rtx_PARALLEL (V4SImode, v)));
2070 emit_insn (gen_vector_select_v4sf (operands[0], operands[1], operands[2],
2071 gen_lowpart (V4SFmode, mask)));
;; Shift-left double by octet immediate: concatenates operands 1 and 2
;; and extracts 16 bytes starting at byte offset operand 3.
2075 (define_insn "altivec_vsldoi_<mode>"
2076 [(set (match_operand:VM 0 "register_operand" "=v")
2077 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
2078 (match_operand:VM 2 "register_operand" "v")
2079 (match_operand:QI 3 "immediate_operand" "i")]
2082 "vsldoi %0,%1,%2,%3"
2083 [(set_attr "type" "vecperm")])
;; Sign-extending unpack of the high half.  For non-BE element order
;; "high" and "low" swap roles, so the low-unpack instruction is
;; emitted instead (and vice versa in altivec_vupkls below).
2085 (define_insn "altivec_vupkhs<VU_char>"
2086 [(set (match_operand:VP 0 "register_operand" "=v")
2087 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2088 UNSPEC_VUNPACK_HI_SIGN))]
2091 if (VECTOR_ELT_ORDER_BIG)
2092 return "vupkhs<VU_char> %0,%1";
2094 return "vupkls<VU_char> %0,%1";
2096 [(set_attr "type" "vecperm")])
;; Raw high-unpack with no element-order adjustment.
2098 (define_insn "*altivec_vupkhs<VU_char>_direct"
2099 [(set (match_operand:VP 0 "register_operand" "=v")
2100 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2101 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2103 "vupkhs<VU_char> %0,%1"
2104 [(set_attr "type" "vecperm")])
;; Sign-extending unpack of the low half (endian-adjusted).
2106 (define_insn "altivec_vupkls<VU_char>"
2107 [(set (match_operand:VP 0 "register_operand" "=v")
2108 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2109 UNSPEC_VUNPACK_LO_SIGN))]
2112 if (VECTOR_ELT_ORDER_BIG)
2113 return "vupkls<VU_char> %0,%1";
2115 return "vupkhs<VU_char> %0,%1";
2117 [(set_attr "type" "vecperm")])
;; Raw low-unpack with no element-order adjustment.
2119 (define_insn "*altivec_vupkls<VU_char>_direct"
2120 [(set (match_operand:VP 0 "register_operand" "=v")
2121 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2122 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
2124 "vupkls<VU_char> %0,%1"
2125 [(set_attr "type" "vecperm")])
;; Pixel unpack high/low, endian-adjusted like the signed unpacks.
2127 (define_insn "altivec_vupkhpx"
2128 [(set (match_operand:V4SI 0 "register_operand" "=v")
2129 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2133 if (VECTOR_ELT_ORDER_BIG)
2134 return "vupkhpx %0,%1";
2136 return "vupklpx %0,%1";
2138 [(set_attr "type" "vecperm")])
2140 (define_insn "altivec_vupklpx"
2141 [(set (match_operand:V4SI 0 "register_operand" "=v")
2142 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2146 if (VECTOR_ELT_ORDER_BIG)
2147 return "vupklpx %0,%1";
2149 return "vupkhpx %0,%1";
2151 [(set_attr "type" "vecperm")])
2153 ;; Compare vectors producing a vector result and a predicate, setting CR6 to
2154 ;; indicate a combined status
;; Each pattern is a parallel: the CC part sets the condition register
;; and the second set writes the element-wise comparison mask to the
;; vector result (the "." forms of the compare instructions do both).
2155 (define_insn "*altivec_vcmpequ<VI_char>_p"
2157 (unspec:CC [(eq:CC (match_operand:VI2 1 "register_operand" "v")
2158 (match_operand:VI2 2 "register_operand" "v"))]
2160 (set (match_operand:VI2 0 "register_operand" "=v")
2161 (eq:VI2 (match_dup 1)
2164 "vcmpequ<VI_char>. %0,%1,%2"
2165 [(set_attr "type" "veccmp")])
;; Signed greater-than.
2167 (define_insn "*altivec_vcmpgts<VI_char>_p"
2169 (unspec:CC [(gt:CC (match_operand:VI2 1 "register_operand" "v")
2170 (match_operand:VI2 2 "register_operand" "v"))]
2172 (set (match_operand:VI2 0 "register_operand" "=v")
2173 (gt:VI2 (match_dup 1)
2176 "vcmpgts<VI_char>. %0,%1,%2"
2177 [(set_attr "type" "veccmp")])
;; Unsigned greater-than.
2179 (define_insn "*altivec_vcmpgtu<VI_char>_p"
2181 (unspec:CC [(gtu:CC (match_operand:VI2 1 "register_operand" "v")
2182 (match_operand:VI2 2 "register_operand" "v"))]
2184 (set (match_operand:VI2 0 "register_operand" "=v")
2185 (gtu:VI2 (match_dup 1)
2188 "vcmpgtu<VI_char>. %0,%1,%2"
2189 [(set_attr "type" "veccmp")])
;; Floating-point equal.
2191 (define_insn "*altivec_vcmpeqfp_p"
2193 (unspec:CC [(eq:CC (match_operand:V4SF 1 "register_operand" "v")
2194 (match_operand:V4SF 2 "register_operand" "v"))]
2196 (set (match_operand:V4SF 0 "register_operand" "=v")
2197 (eq:V4SF (match_dup 1)
2199 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2200 "vcmpeqfp. %0,%1,%2"
2201 [(set_attr "type" "veccmp")])
;; Floating-point greater-than.
2203 (define_insn "*altivec_vcmpgtfp_p"
2205 (unspec:CC [(gt:CC (match_operand:V4SF 1 "register_operand" "v")
2206 (match_operand:V4SF 2 "register_operand" "v"))]
2208 (set (match_operand:V4SF 0 "register_operand" "=v")
2209 (gt:V4SF (match_dup 1)
2211 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2212 "vcmpgtfp. %0,%1,%2"
2213 [(set_attr "type" "veccmp")])
;; Floating-point greater-than-or-equal.
2215 (define_insn "*altivec_vcmpgefp_p"
2217 (unspec:CC [(ge:CC (match_operand:V4SF 1 "register_operand" "v")
2218 (match_operand:V4SF 2 "register_operand" "v"))]
2220 (set (match_operand:V4SF 0 "register_operand" "=v")
2221 (ge:V4SF (match_dup 1)
2223 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2224 "vcmpgefp. %0,%1,%2"
2225 [(set_attr "type" "veccmp")])
;; Bounds compare (vcmpbfp.); both sets are unspecs since "in bounds"
;; has no single RTL comparison code.
2227 (define_insn "altivec_vcmpbfp_p"
2229 (unspec:CC [(match_operand:V4SF 1 "register_operand" "v")
2230 (match_operand:V4SF 2 "register_operand" "v")]
2232 (set (match_operand:V4SF 0 "register_operand" "=v")
2233 (unspec:V4SF [(match_dup 1)
2236 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)"
2238 [(set_attr "type" "veccmp")])
2240 (define_insn "altivec_mtvscr"
2243 [(match_operand:V4SI 0 "register_operand" "v")] UNSPECV_MTVSCR))]
2246 [(set_attr "type" "vecsimple")])
2248 (define_insn "altivec_mfvscr"
2249 [(set (match_operand:V8HI 0 "register_operand" "=v")
2250 (unspec_volatile:V8HI [(reg:SI 110)] UNSPECV_MFVSCR))]
2253 [(set_attr "type" "vecsimple")])
2255 (define_insn "altivec_dssall"
2256 [(unspec_volatile [(const_int 0)] UNSPECV_DSSALL)]
2259 [(set_attr "type" "vecsimple")])
2261 (define_insn "altivec_dss"
2262 [(unspec_volatile [(match_operand:QI 0 "immediate_operand" "i")]
2266 [(set_attr "type" "vecsimple")])
2268 (define_insn "altivec_dst"
2269 [(unspec [(match_operand 0 "register_operand" "b")
2270 (match_operand:SI 1 "register_operand" "r")
2271 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DST)]
2272 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2274 [(set_attr "type" "vecsimple")])
2276 (define_insn "altivec_dstt"
2277 [(unspec [(match_operand 0 "register_operand" "b")
2278 (match_operand:SI 1 "register_operand" "r")
2279 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTT)]
2280 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2282 [(set_attr "type" "vecsimple")])
2284 (define_insn "altivec_dstst"
2285 [(unspec [(match_operand 0 "register_operand" "b")
2286 (match_operand:SI 1 "register_operand" "r")
2287 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTST)]
2288 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2290 [(set_attr "type" "vecsimple")])
2292 (define_insn "altivec_dststt"
2293 [(unspec [(match_operand 0 "register_operand" "b")
2294 (match_operand:SI 1 "register_operand" "r")
2295 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTSTT)]
2296 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2298 [(set_attr "type" "vecsimple")])
2300 (define_insn "altivec_lvsl"
2301 [(set (match_operand:V16QI 0 "register_operand" "=v")
2302 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2306 [(set_attr "type" "vecload")])
2308 (define_insn "altivec_lvsr"
2309 [(set (match_operand:V16QI 0 "register_operand" "=v")
2310 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2314 [(set_attr "type" "vecload")])
2316 (define_expand "build_vector_mask_for_load"
2317 [(set (match_operand:V16QI 0 "register_operand" "")
2318 (unspec:V16QI [(match_operand 1 "memory_operand" "")] UNSPEC_LVSR))]
2325 gcc_assert (GET_CODE (operands[1]) == MEM);
2327 addr = XEXP (operands[1], 0);
2328 temp = gen_reg_rtx (GET_MODE (addr));
2329 emit_insn (gen_rtx_SET (VOIDmode, temp,
2330 gen_rtx_NEG (GET_MODE (addr), addr)));
2331 emit_insn (gen_altivec_lvsr (operands[0],
2332 replace_equiv_address (operands[1], temp)));
2336 ;; Parallel some of the LVE* and STV*'s with unspecs because some have
2337 ;; identical rtl but different instructions -- and gcc gets confused.
2339 (define_expand "altivec_lve<VI_char>x"
2341 [(set (match_operand:VI 0 "register_operand" "=v")
2342 (match_operand:VI 1 "memory_operand" "Z"))
2343 (unspec [(const_int 0)] UNSPEC_LVE)])]
2346 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2348 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_LVE);
2353 (define_insn "*altivec_lve<VI_char>x_internal"
2355 [(set (match_operand:VI 0 "register_operand" "=v")
2356 (match_operand:VI 1 "memory_operand" "Z"))
2357 (unspec [(const_int 0)] UNSPEC_LVE)])]
2359 "lve<VI_char>x %0,%y1"
2360 [(set_attr "type" "vecload")])
2362 (define_insn "*altivec_lvesfx"
2364 [(set (match_operand:V4SF 0 "register_operand" "=v")
2365 (match_operand:V4SF 1 "memory_operand" "Z"))
2366 (unspec [(const_int 0)] UNSPEC_LVE)])]
2369 [(set_attr "type" "vecload")])
2371 (define_expand "altivec_lvxl_<mode>"
2373 [(set (match_operand:VM2 0 "register_operand" "=v")
2374 (match_operand:VM2 1 "memory_operand" "Z"))
2375 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
2378 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2380 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_SET_VSCR);
2385 (define_insn "*altivec_lvxl_<mode>_internal"
2387 [(set (match_operand:VM2 0 "register_operand" "=v")
2388 (match_operand:VM2 1 "memory_operand" "Z"))
2389 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
2392 [(set_attr "type" "vecload")])
2394 (define_expand "altivec_lvx_<mode>"
2396 [(set (match_operand:VM2 0 "register_operand" "=v")
2397 (match_operand:VM2 1 "memory_operand" "Z"))
2398 (unspec [(const_int 0)] UNSPEC_LVX)])]
2401 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2403 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_LVX);
2408 (define_insn "*altivec_lvx_<mode>_internal"
2410 [(set (match_operand:VM2 0 "register_operand" "=v")
2411 (match_operand:VM2 1 "memory_operand" "Z"))
2412 (unspec [(const_int 0)] UNSPEC_LVX)])]
2415 [(set_attr "type" "vecload")])
2417 (define_expand "altivec_stvx_<mode>"
2419 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2420 (match_operand:VM2 1 "register_operand" "v"))
2421 (unspec [(const_int 0)] UNSPEC_STVX)])]
2424 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2426 altivec_expand_stvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVX);
2431 (define_insn "*altivec_stvx_<mode>_internal"
2433 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2434 (match_operand:VM2 1 "register_operand" "v"))
2435 (unspec [(const_int 0)] UNSPEC_STVX)])]
2438 [(set_attr "type" "vecstore")])
2440 (define_expand "altivec_stvxl_<mode>"
2442 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2443 (match_operand:VM2 1 "register_operand" "v"))
2444 (unspec [(const_int 0)] UNSPEC_STVXL)])]
2447 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2449 altivec_expand_stvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVXL);
2454 (define_insn "*altivec_stvxl_<mode>_internal"
2456 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2457 (match_operand:VM2 1 "register_operand" "v"))
2458 (unspec [(const_int 0)] UNSPEC_STVXL)])]
2461 [(set_attr "type" "vecstore")])
2463 (define_expand "altivec_stve<VI_char>x"
2464 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
2465 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
2468 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2470 altivec_expand_stvex_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVE);
2475 (define_insn "*altivec_stve<VI_char>x_internal"
2476 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
2477 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
2479 "stve<VI_char>x %1,%y0"
2480 [(set_attr "type" "vecstore")])
2482 (define_insn "*altivec_stvesfx"
2483 [(set (match_operand:SF 0 "memory_operand" "=Z")
2484 (unspec:SF [(match_operand:V4SF 1 "register_operand" "v")] UNSPEC_STVE))]
2487 [(set_attr "type" "vecstore")])
2490 ;; xxlxor/vxor SCRATCH0,SCRATCH0,SCRATCH0
2491 ;; vsubu?m SCRATCH2,SCRATCH1,%1
2492 ;; vmaxs? %0,%1,SCRATCH2"
2493 (define_expand "abs<mode>2"
2494 [(set (match_dup 2) (match_dup 3))
2496 (minus:VI2 (match_dup 2)
2497 (match_operand:VI2 1 "register_operand" "v")))
2498 (set (match_operand:VI2 0 "register_operand" "=v")
2499 (smax:VI2 (match_dup 1) (match_dup 4)))]
2502 int i, n_elt = GET_MODE_NUNITS (<MODE>mode);
2503 rtvec v = rtvec_alloc (n_elt);
2505 /* Create an all 0 constant. */
2506 for (i = 0; i < n_elt; ++i)
2507 RTVEC_ELT (v, i) = const0_rtx;
2509 operands[2] = gen_reg_rtx (<MODE>mode);
2510 operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, v);
2511 operands[4] = gen_reg_rtx (<MODE>mode);
2515 ;; vspltisw SCRATCH1,-1
2516 ;; vslw SCRATCH2,SCRATCH1,SCRATCH1
2517 ;; vandc %0,%1,SCRATCH2
2518 (define_expand "altivec_absv4sf2"
2520 (vec_duplicate:V4SI (const_int -1)))
2522 (ashift:V4SI (match_dup 2) (match_dup 2)))
2523 (set (match_operand:V4SF 0 "register_operand" "=v")
2524 (and:V4SF (not:V4SF (subreg:V4SF (match_dup 3) 0))
2525 (match_operand:V4SF 1 "register_operand" "v")))]
2528 operands[2] = gen_reg_rtx (V4SImode);
2529 operands[3] = gen_reg_rtx (V4SImode);
2533 ;; vspltis? SCRATCH0,0
2534 ;; vsubs?s SCRATCH2,SCRATCH1,%1
2535 ;; vmaxs? %0,%1,SCRATCH2"
2536 (define_expand "altivec_abss_<mode>"
2537 [(set (match_dup 2) (vec_duplicate:VI (const_int 0)))
2538 (parallel [(set (match_dup 3)
2539 (unspec:VI [(match_dup 2)
2540 (match_operand:VI 1 "register_operand" "v")]
2542 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))])
2543 (set (match_operand:VI 0 "register_operand" "=v")
2544 (smax:VI (match_dup 1) (match_dup 3)))]
2547 operands[2] = gen_reg_rtx (GET_MODE (operands[0]));
2548 operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
2551 (define_expand "reduc_splus_<mode>"
2552 [(set (match_operand:VIshort 0 "register_operand" "=v")
2553 (unspec:VIshort [(match_operand:VIshort 1 "register_operand" "v")]
2554 UNSPEC_REDUC_PLUS))]
2557 rtx vzero = gen_reg_rtx (V4SImode);
2558 rtx vtmp1 = gen_reg_rtx (V4SImode);
2559 rtx dest = gen_lowpart (V4SImode, operands[0]);
2561 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2562 emit_insn (gen_altivec_vsum4s<VI_char>s (vtmp1, operands[1], vzero));
2563 emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
2567 (define_expand "reduc_uplus_v16qi"
2568 [(set (match_operand:V16QI 0 "register_operand" "=v")
2569 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
2570 UNSPEC_REDUC_PLUS))]
2573 rtx vzero = gen_reg_rtx (V4SImode);
2574 rtx vtmp1 = gen_reg_rtx (V4SImode);
2575 rtx dest = gen_lowpart (V4SImode, operands[0]);
2577 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2578 emit_insn (gen_altivec_vsum4ubs (vtmp1, operands[1], vzero));
2579 emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
2583 (define_expand "neg<mode>2"
2584 [(use (match_operand:VI 0 "register_operand" ""))
2585 (use (match_operand:VI 1 "register_operand" ""))]
2591 vzero = gen_reg_rtx (GET_MODE (operands[0]));
2592 emit_insn (gen_altivec_vspltis<VI_char> (vzero, const0_rtx));
2593 emit_insn (gen_sub<mode>3 (operands[0], vzero, operands[1]));
2598 (define_expand "udot_prod<mode>"
2599 [(set (match_operand:V4SI 0 "register_operand" "=v")
2600 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
2601 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
2602 (match_operand:VIshort 2 "register_operand" "v")]
2607 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], operands[2], operands[3]));
2611 (define_expand "sdot_prodv8hi"
2612 [(set (match_operand:V4SI 0 "register_operand" "=v")
2613 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
2614 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
2615 (match_operand:V8HI 2 "register_operand" "v")]
2620 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], operands[2], operands[3]));
2624 (define_expand "widen_usum<mode>3"
2625 [(set (match_operand:V4SI 0 "register_operand" "=v")
2626 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
2627 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")]
2632 rtx vones = gen_reg_rtx (GET_MODE (operands[1]));
2634 emit_insn (gen_altivec_vspltis<VI_char> (vones, const1_rtx));
2635 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], vones, operands[2]));
2639 (define_expand "widen_ssumv16qi3"
2640 [(set (match_operand:V4SI 0 "register_operand" "=v")
2641 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
2642 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")]
2647 rtx vones = gen_reg_rtx (V16QImode);
2649 emit_insn (gen_altivec_vspltisb (vones, const1_rtx));
2650 emit_insn (gen_altivec_vmsummbm (operands[0], operands[1], vones, operands[2]));
2654 (define_expand "widen_ssumv8hi3"
2655 [(set (match_operand:V4SI 0 "register_operand" "=v")
2656 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
2657 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2662 rtx vones = gen_reg_rtx (V8HImode);
2664 emit_insn (gen_altivec_vspltish (vones, const1_rtx));
2665 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], vones, operands[2]));
2669 (define_expand "vec_unpacks_hi_<VP_small_lc>"
2670 [(set (match_operand:VP 0 "register_operand" "=v")
2671 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2672 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2676 (define_expand "vec_unpacks_lo_<VP_small_lc>"
2677 [(set (match_operand:VP 0 "register_operand" "=v")
2678 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2679 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
2683 (define_insn "vperm_v8hiv4si"
2684 [(set (match_operand:V4SI 0 "register_operand" "=v")
2685 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
2686 (match_operand:V4SI 2 "register_operand" "v")
2687 (match_operand:V16QI 3 "register_operand" "v")]
2691 [(set_attr "type" "vecperm")])
2693 (define_insn "vperm_v16qiv8hi"
2694 [(set (match_operand:V8HI 0 "register_operand" "=v")
2695 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
2696 (match_operand:V8HI 2 "register_operand" "v")
2697 (match_operand:V16QI 3 "register_operand" "v")]
2701 [(set_attr "type" "vecperm")])
2704 (define_expand "vec_unpacku_hi_v16qi"
2705 [(set (match_operand:V8HI 0 "register_operand" "=v")
2706 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
2711 rtx vzero = gen_reg_rtx (V8HImode);
2712 rtx mask = gen_reg_rtx (V16QImode);
2713 rtvec v = rtvec_alloc (16);
2714 bool be = BYTES_BIG_ENDIAN;
2716 emit_insn (gen_altivec_vspltish (vzero, const0_rtx));
2718 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 7);
2719 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 0 : 16);
2720 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 16 : 6);
2721 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 1 : 16);
2722 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 5);
2723 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 2 : 16);
2724 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 16 : 4);
2725 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 3 : 16);
2726 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 3);
2727 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 4 : 16);
2728 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 2);
2729 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 5 : 16);
2730 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 1);
2731 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 6 : 16);
2732 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 : 0);
2733 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 7 : 16);
2735 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2736 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
2740 (define_expand "vec_unpacku_hi_v8hi"
2741 [(set (match_operand:V4SI 0 "register_operand" "=v")
2742 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2747 rtx vzero = gen_reg_rtx (V4SImode);
2748 rtx mask = gen_reg_rtx (V16QImode);
2749 rtvec v = rtvec_alloc (16);
2750 bool be = BYTES_BIG_ENDIAN;
2752 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2754 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 7);
2755 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 17 : 6);
2756 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 0 : 17);
2757 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 1 : 16);
2758 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 5);
2759 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 17 : 4);
2760 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 2 : 17);
2761 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 3 : 16);
2762 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 3);
2763 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 17 : 2);
2764 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 4 : 17);
2765 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 5 : 16);
2766 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 1);
2767 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 : 0);
2768 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 6 : 17);
2769 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 7 : 16);
2771 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2772 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
2776 (define_expand "vec_unpacku_lo_v16qi"
2777 [(set (match_operand:V8HI 0 "register_operand" "=v")
2778 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
2783 rtx vzero = gen_reg_rtx (V8HImode);
2784 rtx mask = gen_reg_rtx (V16QImode);
2785 rtvec v = rtvec_alloc (16);
2786 bool be = BYTES_BIG_ENDIAN;
2788 emit_insn (gen_altivec_vspltish (vzero, const0_rtx));
2790 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
2791 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 8 : 16);
2792 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 16 : 14);
2793 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 9 : 16);
2794 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
2795 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 10 : 16);
2796 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 16 : 12);
2797 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
2798 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
2799 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 12 : 16);
2800 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 10);
2801 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
2802 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 9);
2803 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 14 : 16);
2804 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 : 8);
2805 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);
2807 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2808 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
2812 (define_expand "vec_unpacku_lo_v8hi"
2813 [(set (match_operand:V4SI 0 "register_operand" "=v")
2814 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2819 rtx vzero = gen_reg_rtx (V4SImode);
2820 rtx mask = gen_reg_rtx (V16QImode);
2821 rtvec v = rtvec_alloc (16);
2822 bool be = BYTES_BIG_ENDIAN;
2824 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2826 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
2827 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 17 : 14);
2828 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 8 : 17);
2829 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 9 : 16);
2830 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
2831 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 17 : 12);
2832 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 10 : 17);
2833 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
2834 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
2835 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 17 : 10);
2836 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 12 : 17);
2837 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
2838 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 9);
2839 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 : 8);
2840 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 14 : 17);
2841 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);
2843 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2844 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
2848 (define_expand "vec_widen_umult_hi_v16qi"
2849 [(set (match_operand:V8HI 0 "register_operand" "=v")
2850 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
2851 (match_operand:V16QI 2 "register_operand" "v")]
2856 rtx ve = gen_reg_rtx (V8HImode);
2857 rtx vo = gen_reg_rtx (V8HImode);
2859 if (BYTES_BIG_ENDIAN)
2861 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
2862 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
2863 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
2867 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
2868 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
2869 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
2874 (define_expand "vec_widen_umult_lo_v16qi"
2875 [(set (match_operand:V8HI 0 "register_operand" "=v")
2876 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
2877 (match_operand:V16QI 2 "register_operand" "v")]
2882 rtx ve = gen_reg_rtx (V8HImode);
2883 rtx vo = gen_reg_rtx (V8HImode);
2885 if (BYTES_BIG_ENDIAN)
2887 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
2888 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
2889 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
2893 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
2894 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
2895 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
2900 (define_expand "vec_widen_smult_hi_v16qi"
2901 [(set (match_operand:V8HI 0 "register_operand" "=v")
2902 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
2903 (match_operand:V16QI 2 "register_operand" "v")]
2908 rtx ve = gen_reg_rtx (V8HImode);
2909 rtx vo = gen_reg_rtx (V8HImode);
2911 if (BYTES_BIG_ENDIAN)
2913 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
2914 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
2915 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
2919 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
2920 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
2921 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
2926 (define_expand "vec_widen_smult_lo_v16qi"
2927 [(set (match_operand:V8HI 0 "register_operand" "=v")
2928 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
2929 (match_operand:V16QI 2 "register_operand" "v")]
2934 rtx ve = gen_reg_rtx (V8HImode);
2935 rtx vo = gen_reg_rtx (V8HImode);
2937 if (BYTES_BIG_ENDIAN)
2939 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
2940 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
2941 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
2945 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
2946 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
2947 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
2952 (define_expand "vec_widen_umult_hi_v8hi"
2953 [(set (match_operand:V4SI 0 "register_operand" "=v")
2954 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
2955 (match_operand:V8HI 2 "register_operand" "v")]
2960 rtx ve = gen_reg_rtx (V4SImode);
2961 rtx vo = gen_reg_rtx (V4SImode);
2963 if (BYTES_BIG_ENDIAN)
2965 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
2966 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
2967 emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
2971 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
2972 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
2973 emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
2978 (define_expand "vec_widen_umult_lo_v8hi"
2979 [(set (match_operand:V4SI 0 "register_operand" "=v")
2980 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
2981 (match_operand:V8HI 2 "register_operand" "v")]
2986 rtx ve = gen_reg_rtx (V4SImode);
2987 rtx vo = gen_reg_rtx (V4SImode);
2989 if (BYTES_BIG_ENDIAN)
2991 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
2992 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
2993 emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
2997 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
2998 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
2999 emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
3004 (define_expand "vec_widen_smult_hi_v8hi"
3005 [(set (match_operand:V4SI 0 "register_operand" "=v")
3006 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3007 (match_operand:V8HI 2 "register_operand" "v")]
3012 rtx ve = gen_reg_rtx (V4SImode);
3013 rtx vo = gen_reg_rtx (V4SImode);
3015 if (BYTES_BIG_ENDIAN)
3017 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3018 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3019 emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
3023 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3024 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3025 emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
3030 (define_expand "vec_widen_smult_lo_v8hi"
3031 [(set (match_operand:V4SI 0 "register_operand" "=v")
3032 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3033 (match_operand:V8HI 2 "register_operand" "v")]
3038 rtx ve = gen_reg_rtx (V4SImode);
3039 rtx vo = gen_reg_rtx (V4SImode);
3041 if (BYTES_BIG_ENDIAN)
3043 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3044 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3045 emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
3049 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3050 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3051 emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
3056 (define_expand "vec_pack_trunc_<mode>"
3057 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
3058 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
3059 (match_operand:VP 2 "register_operand" "v")]
3060 UNSPEC_VPACK_UNS_UNS_MOD))]
3064 (define_expand "altivec_negv4sf2"
3065 [(use (match_operand:V4SF 0 "register_operand" ""))
3066 (use (match_operand:V4SF 1 "register_operand" ""))]
3072 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
3073 neg0 = gen_reg_rtx (V4SImode);
3074 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
3075 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
3078 emit_insn (gen_xorv4sf3 (operands[0],
3079 gen_lowpart (V4SFmode, neg0), operands[1]));
3084 ;; Vector SIMD PEM v2.06c defines LVLX, LVLXL, LVRX, LVRXL,
3085 ;; STVLX, STVLXL, STVRX, STVRXL are available only on Cell.
3086 (define_insn "altivec_lvlx"
3087 [(set (match_operand:V16QI 0 "register_operand" "=v")
3088 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3090 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3092 [(set_attr "type" "vecload")])
3094 (define_insn "altivec_lvlxl"
3095 [(set (match_operand:V16QI 0 "register_operand" "=v")
3096 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3098 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3100 [(set_attr "type" "vecload")])
3102 (define_insn "altivec_lvrx"
3103 [(set (match_operand:V16QI 0 "register_operand" "=v")
3104 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3106 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3108 [(set_attr "type" "vecload")])
3110 (define_insn "altivec_lvrxl"
3111 [(set (match_operand:V16QI 0 "register_operand" "=v")
3112 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
3114 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3116 [(set_attr "type" "vecload")])
3118 (define_insn "altivec_stvlx"
3120 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3121 (match_operand:V16QI 1 "register_operand" "v"))
3122 (unspec [(const_int 0)] UNSPEC_STVLX)])]
3123 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3125 [(set_attr "type" "vecstore")])
3127 (define_insn "altivec_stvlxl"
3129 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3130 (match_operand:V16QI 1 "register_operand" "v"))
3131 (unspec [(const_int 0)] UNSPEC_STVLXL)])]
3132 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3134 [(set_attr "type" "vecstore")])
3136 (define_insn "altivec_stvrx"
3138 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3139 (match_operand:V16QI 1 "register_operand" "v"))
3140 (unspec [(const_int 0)] UNSPEC_STVRX)])]
3141 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3143 [(set_attr "type" "vecstore")])
3145 (define_insn "altivec_stvrxl"
3147 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
3148 (match_operand:V16QI 1 "register_operand" "v"))
3149 (unspec [(const_int 0)] UNSPEC_STVRXL)])]
3150 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
3152 [(set_attr "type" "vecstore")])
3154 (define_expand "vec_unpacks_float_hi_v8hi"
3155 [(set (match_operand:V4SF 0 "register_operand" "")
3156 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3157 UNSPEC_VUPKHS_V4SF))]
3161 rtx tmp = gen_reg_rtx (V4SImode);
3163 emit_insn (gen_vec_unpacks_hi_v8hi (tmp, operands[1]));
3164 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
3168 (define_expand "vec_unpacks_float_lo_v8hi"
3169 [(set (match_operand:V4SF 0 "register_operand" "")
3170 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3171 UNSPEC_VUPKLS_V4SF))]
3175 rtx tmp = gen_reg_rtx (V4SImode);
3177 emit_insn (gen_vec_unpacks_lo_v8hi (tmp, operands[1]));
3178 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
3182 (define_expand "vec_unpacku_float_hi_v8hi"
3183 [(set (match_operand:V4SF 0 "register_operand" "")
3184 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3185 UNSPEC_VUPKHU_V4SF))]
3189 rtx tmp = gen_reg_rtx (V4SImode);
3191 emit_insn (gen_vec_unpacku_hi_v8hi (tmp, operands[1]));
3192 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
3196 (define_expand "vec_unpacku_float_lo_v8hi"
3197 [(set (match_operand:V4SF 0 "register_operand" "")
3198 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
3199 UNSPEC_VUPKLU_V4SF))]
3203 rtx tmp = gen_reg_rtx (V4SImode);
3205 emit_insn (gen_vec_unpacku_lo_v8hi (tmp, operands[1]));
3206 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
3211 ;; Power8 vector instructions encoded as Altivec instructions
3213 ;; Vector count leading zeros
3214 (define_insn "*p8v_clz<mode>2"
3215 [(set (match_operand:VI2 0 "register_operand" "=v")
3216 (clz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
3219 [(set_attr "length" "4")
3220 (set_attr "type" "vecsimple")])
3222 ;; Vector population count
3223 (define_insn "*p8v_popcount<mode>2"
3224 [(set (match_operand:VI2 0 "register_operand" "=v")
3225 (popcount:VI2 (match_operand:VI2 1 "register_operand" "v")))]
3228 [(set_attr "length" "4")
3229 (set_attr "type" "vecsimple")])
3231 ;; Vector Gather Bits by Bytes by Doubleword
3232 (define_insn "p8v_vgbbd"
3233 [(set (match_operand:V16QI 0 "register_operand" "=v")
3234 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
3238 [(set_attr "length" "4")
3239 (set_attr "type" "vecsimple")])
3242 ;; 128-bit binary integer arithmetic
3243 ;; We have a special container type (V1TImode) to allow operations using the
3244 ;; ISA 2.07 128-bit binary support to target the VMX/altivec registers without
3245 ;; having to worry about the register allocator deciding GPRs are better.
3247 (define_insn "altivec_vadduqm"
3248 [(set (match_operand:V1TI 0 "register_operand" "=v")
3249 (plus:V1TI (match_operand:V1TI 1 "register_operand" "v")
3250 (match_operand:V1TI 2 "register_operand" "v")))]
3253 [(set_attr "length" "4")
3254 (set_attr "type" "vecsimple")])
3256 (define_insn "altivec_vaddcuq"
3257 [(set (match_operand:V1TI 0 "register_operand" "=v")
3258 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3259 (match_operand:V1TI 2 "register_operand" "v")]
3263 [(set_attr "length" "4")
3264 (set_attr "type" "vecsimple")])
3266 (define_insn "altivec_vsubuqm"
3267 [(set (match_operand:V1TI 0 "register_operand" "=v")
3268 (minus:V1TI (match_operand:V1TI 1 "register_operand" "v")
3269 (match_operand:V1TI 2 "register_operand" "v")))]
3272 [(set_attr "length" "4")
3273 (set_attr "type" "vecsimple")])
3275 (define_insn "altivec_vsubcuq"
3276 [(set (match_operand:V1TI 0 "register_operand" "=v")
3277 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3278 (match_operand:V1TI 2 "register_operand" "v")]
3282 [(set_attr "length" "4")
3283 (set_attr "type" "vecsimple")])
3285 (define_insn "altivec_vaddeuqm"
;; Extended 128-bit add: three V1TI inputs into an unspec, emitted as
;; "vaddeuqm".  Operand 3 is presumably the carry-in vector (add
;; extended unsigned quadword modulo) -- the semantics are opaque in the
;; unspec; confirm against ISA 2.07.
3286 [(set (match_operand:V1TI 0 "register_operand" "=v")
3287 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3288 (match_operand:V1TI 2 "register_operand" "v")
3289 (match_operand:V1TI 3 "register_operand" "v")]
3292 "vaddeuqm %0,%1,%2,%3"
3293 [(set_attr "length" "4")
3294 (set_attr "type" "vecsimple")])
3296 (define_insn "altivec_vaddecuq"
;; Extended 128-bit add carry-out: three V1TI inputs into an unspec,
;; emitted as "vaddecuq".  Presumably yields the carry-out of the
;; extended add with carry-in operand 3 -- opaque unspec; confirm
;; against ISA 2.07.
3297 [(set (match_operand:V1TI 0 "register_operand" "=v")
3298 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3299 (match_operand:V1TI 2 "register_operand" "v")
3300 (match_operand:V1TI 3 "register_operand" "v")]
3303 "vaddecuq %0,%1,%2,%3"
3304 [(set_attr "length" "4")
3305 (set_attr "type" "vecsimple")])
3307 (define_insn "altivec_vsubeuqm"
;; Extended 128-bit subtract: three V1TI inputs into an unspec, emitted
;; as "vsubeuqm".  Operand 3 is presumably the carry/borrow-in vector --
;; opaque unspec; confirm against ISA 2.07.
3308 [(set (match_operand:V1TI 0 "register_operand" "=v")
3309 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3310 (match_operand:V1TI 2 "register_operand" "v")
3311 (match_operand:V1TI 3 "register_operand" "v")]
3314 "vsubeuqm %0,%1,%2,%3"
3315 [(set_attr "length" "4")
3316 (set_attr "type" "vecsimple")])
3318 (define_insn "altivec_vsubecuq"
;; Extended 128-bit subtract carry-out: three V1TI inputs into an
;; unspec, emitted as "vsubecuq".  Presumably yields the carry-out of
;; the extended subtract with carry-in operand 3 -- opaque unspec;
;; confirm against ISA 2.07.
3319 [(set (match_operand:V1TI 0 "register_operand" "=v")
3320 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3321 (match_operand:V1TI 2 "register_operand" "v")
3322 (match_operand:V1TI 3 "register_operand" "v")]
3325 "vsubecuq %0,%1,%2,%3"
3326 [(set_attr "length" "4")
3327 (set_attr "type" "vecsimple")])
3329 ;; We use V2DI as the output type to simplify converting the permute
3330 ;; bits into an integer
3331 (define_insn "altivec_vbpermq"
;; Vector bit permute quadword: two V16QI inputs through an unspec, with
;; V2DI output so the permute bits convert easily to an integer (see the
;; comment above).  NOTE(review): the unspec name, insn condition and
;; output template (presumably "vbpermq %0,%1,%2") are outside this
;; chunk -- confirm.
3332 [(set (match_operand:V2DI 0 "register_operand" "=v")
3333 (unspec:V2DI [(match_operand:V16QI 1 "register_operand" "v")
3334 (match_operand:V16QI 2 "register_operand" "v")]
3338 [(set_attr "length" "4")
3339 (set_attr "type" "vecsimple")])
3341 ;; Decimal Integer operations
;; Iterate the BCD patterns below over the add and subtract unspecs.
3342 (define_int_iterator UNSPEC_BCD_ADD_SUB [UNSPEC_BCDADD UNSPEC_BCDSUB])
;; Map each BCD unspec to the "add"/"sub" substring used to build the
;; bcd<bcd_add_sub> pattern names and mnemonics.
3344 (define_int_attr bcd_add_sub [(UNSPEC_BCDADD "add")
3345 (UNSPEC_BCDSUB "sub")])
;; Comparison codes for which bcd<bcd_add_sub>_<code> expanders are
;; generated; unordered covers the BCD invalid/overflow cases described
;; in the comment further down.
3347 (define_code_iterator BCD_TEST [eq lt gt unordered])
3349 (define_insn "bcd<bcd_add_sub>"
;; Decimal (BCD) add/subtract on a 128-bit value; the record-form
;; mnemonic ("bcdadd."/"bcdsub.") sets CR6, modeled here as a clobber of
;; the fixed CCFP register 74.  Operand 3 is a 0/1 immediate
;; (const_0_to_1_operand) -- presumably the PS (preferred sign) field;
;; confirm against ISA 2.07.  (The insn condition line is outside this
;; chunk.)
3350 [(set (match_operand:V1TI 0 "register_operand" "")
3351 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "")
3352 (match_operand:V1TI 2 "register_operand" "")
3353 (match_operand:QI 3 "const_0_to_1_operand" "")]
3354 UNSPEC_BCD_ADD_SUB))
3355 (clobber (reg:CCFP 74))]
3357 "bcd<bcd_add_sub>. %0,%1,%2,%3"
3358 [(set_attr "length" "4")
3359 (set_attr "type" "vecsimple")])
3361 ;; Use a floating point type (V2DFmode) for the compare to set CR6 so that we
3362 ;; can use the unordered test for BCD NaNs and add/subtracts that overflow. An
3363 ;; UNORDERED test on an integer type (like V1TImode) is not defined. The type
3364 ;; probably should be one that can go in the VMX (Altivec) registers, so we
3365 ;; can't use DDmode or DFmode.
3366 (define_insn "*bcd<bcd_add_sub>_test"
;; Variant used when only the CR6 condition result of bcdadd./bcdsub. is
;; wanted: the V1TI value is written to a scratch and the compare against
;; a V2DF zero (constraint "j") feeds the condition register.
;; NOTE(review): the SET destination lines (presumably
;; "(set (reg:CCFP 74) (compare:CCFP ...") and the unspec name/insn
;; condition fall outside this chunk -- confirm.
3369 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "v")
3370 (match_operand:V1TI 2 "register_operand" "v")
3371 (match_operand:QI 3 "const_0_to_1_operand" "i")]
3373 (match_operand:V2DF 4 "zero_constant" "j")))
3374 (clobber (match_scratch:V1TI 0 "=v"))]
3376 "bcd<bcd_add_sub>. %0,%1,%2,%3"
3377 [(set_attr "length" "4")
3378 (set_attr "type" "vecsimple")])
3380 (define_insn "*bcd<bcd_add_sub>_test2"
;; Variant used when BOTH the V1TI value and the CR6 condition result of
;; bcdadd./bcdsub. are wanted: operand 0 gets the value, and a parallel
;; V2DF compare against zero (operand 4, constraint "j") models the CR6
;; side effect over the same inputs (match_dup 1 ...).
;; NOTE(review): several interior lines (the CCFP SET, the duplicated
;; operands, the insn condition) fall outside this chunk -- confirm.
3381 [(set (match_operand:V1TI 0 "register_operand" "=v")
3382 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
3383 (match_operand:V1TI 2 "register_operand" "v")
3384 (match_operand:QI 3 "const_0_to_1_operand" "i")]
3385 UNSPEC_BCD_ADD_SUB))
3388 (unspec:V2DF [(match_dup 1)
3392 (match_operand:V2DF 4 "zero_constant" "j")))]
3394 "bcd<bcd_add_sub>. %0,%1,%2,%3"
3395 [(set_attr "length" "4")
3396 (set_attr "type" "vecsimple")])
3398 (define_expand "bcd<bcd_add_sub>_<code>"
;; Expander for bcd{add,sub}_{eq,lt,gt,unordered}: perform the BCD
;; operation setting CR6 (reg:CCFP 74), then materialize the BCD_TEST
;; comparison of CR6 into the SI result, operand 0.  The preparation
;; statement supplies the V2DF zero used by the compare pattern.
;; NOTE(review): the expander's closing lines fall outside this chunk.
3399 [(parallel [(set (reg:CCFP 74)
3401 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "")
3402 (match_operand:V1TI 2 "register_operand" "")
3403 (match_operand:QI 3 "const_0_to_1_operand" "")]
3406 (clobber (match_scratch:V1TI 5 ""))])
3407 (set (match_operand:SI 0 "register_operand" "")
3408 (BCD_TEST:SI (reg:CCFP 74)
3412 operands[4] = CONST0_RTX (V2DFmode);
3415 ;; Peephole2 pattern to combine a bcdadd/bcdsub that calculates the value and
3416 ;; the bcdadd/bcdsub that tests the value. The combiner won't work since
3417 ;; CR6 is a hard coded register. Unfortunately, all of the Altivec predicate
3418 ;; support is hard coded to use the fixed register CR6 instead of creating
3419 ;; a register class for CR6.
3422 [(parallel [(set (match_operand:V1TI 0 "register_operand" "")
3423 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "")
3424 (match_operand:V1TI 2 "register_operand" "")
3425 (match_operand:QI 3 "const_0_to_1_operand" "")]
3426 UNSPEC_BCD_ADD_SUB))
3427 (clobber (reg:CCFP 74))])
3428 (parallel [(set (reg:CCFP 74)
3430 (unspec:V2DF [(match_dup 1)
3434 (match_operand:V2DF 4 "zero_constant" "")))
3435 (clobber (match_operand:V1TI 5 "register_operand" ""))])]
3437 [(parallel [(set (match_dup 0)
3438 (unspec:V1TI [(match_dup 1)
3441 UNSPEC_BCD_ADD_SUB))
3444 (unspec:V2DF [(match_dup 1)