xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/rs6000/altivec.md (revision f3cfa6f6ce31685c6c4a758bc430e69eb99f50a4)
1;; AltiVec patterns.
2;; Copyright (C) 2002-2016 Free Software Foundation, Inc.
3;; Contributed by Aldy Hernandez (aldy@quesejoda.com)
4
5;; This file is part of GCC.
6
7;; GCC is free software; you can redistribute it and/or modify it
8;; under the terms of the GNU General Public License as published
9;; by the Free Software Foundation; either version 3, or (at your
10;; option) any later version.
11
12;; GCC is distributed in the hope that it will be useful, but WITHOUT
13;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14;; or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
15;; License for more details.
16
17;; You should have received a copy of the GNU General Public License
18;; along with GCC; see the file COPYING3.  If not see
19;; <http://www.gnu.org/licenses/>.
20
;; Enumeration of UNSPEC codes used by the AltiVec patterns in this file.
;; Each constant tags an (unspec ...) rtx so that patterns with otherwise
;; identical shapes remain distinguishable to the RTL passes.
21(define_c_enum "unspec"
22  [UNSPEC_VCMPBFP
23   UNSPEC_VMSUMU
24   UNSPEC_VMSUMM
25   UNSPEC_VMSUMSHM
26   UNSPEC_VMSUMUHS
27   UNSPEC_VMSUMSHS
28   UNSPEC_VMHADDSHS
29   UNSPEC_VMHRADDSHS
30   UNSPEC_VADDCUW
31   UNSPEC_VADDU
32   UNSPEC_VADDS
33   UNSPEC_VAVGU
34   UNSPEC_VAVGS
35   UNSPEC_VMULEUB
36   UNSPEC_VMULESB
37   UNSPEC_VMULEUH
38   UNSPEC_VMULESH
39   UNSPEC_VMULOUB
40   UNSPEC_VMULOSB
41   UNSPEC_VMULOUH
42   UNSPEC_VMULOSH
43   UNSPEC_VPKPX
44   UNSPEC_VPACK_SIGN_SIGN_SAT
45   UNSPEC_VPACK_SIGN_UNS_SAT
46   UNSPEC_VPACK_UNS_UNS_SAT
47   UNSPEC_VPACK_UNS_UNS_MOD
48   UNSPEC_VPACK_UNS_UNS_MOD_DIRECT
49   UNSPEC_VSLV4SI
50   UNSPEC_VSLO
51   UNSPEC_VSR
52   UNSPEC_VSRO
53   UNSPEC_VSUBCUW
54   UNSPEC_VSUBU
55   UNSPEC_VSUBS
56   UNSPEC_VSUM4UBS
57   UNSPEC_VSUM4S
58   UNSPEC_VSUM2SWS
59   UNSPEC_VSUMSWS
60   UNSPEC_VPERM
61   UNSPEC_VPERMR
62   UNSPEC_VPERM_UNS
63   UNSPEC_VRFIN
64   UNSPEC_VCFUX
65   UNSPEC_VCFSX
66   UNSPEC_VCTUXS
67   UNSPEC_VCTSXS
68   UNSPEC_VLOGEFP
69   UNSPEC_VEXPTEFP
70   UNSPEC_VSLDOI
71   UNSPEC_VUNPACK_HI_SIGN
72   UNSPEC_VUNPACK_LO_SIGN
73   UNSPEC_VUNPACK_HI_SIGN_DIRECT
74   UNSPEC_VUNPACK_LO_SIGN_DIRECT
75   UNSPEC_VUPKHPX
76   UNSPEC_VUPKLPX
77   UNSPEC_DARN
78   UNSPEC_DARN_32
79   UNSPEC_DARN_RAW
80   UNSPEC_DST
81   UNSPEC_DSTT
82   UNSPEC_DSTST
83   UNSPEC_DSTSTT
84   UNSPEC_LVSL
85   UNSPEC_LVSR
86   UNSPEC_LVE
87   UNSPEC_STVX
88   UNSPEC_STVXL
89   UNSPEC_STVE
90   UNSPEC_SET_VSCR
91   UNSPEC_GET_VRSAVE
92   UNSPEC_LVX
93   UNSPEC_REDUC_PLUS
94   UNSPEC_VECSH
95   UNSPEC_EXTEVEN_V4SI
96   UNSPEC_EXTEVEN_V8HI
97   UNSPEC_EXTEVEN_V16QI
98   UNSPEC_EXTEVEN_V4SF
99   UNSPEC_EXTODD_V4SI
100   UNSPEC_EXTODD_V8HI
101   UNSPEC_EXTODD_V16QI
102   UNSPEC_EXTODD_V4SF
103   UNSPEC_INTERHI_V4SI
104   UNSPEC_INTERHI_V8HI
105   UNSPEC_INTERHI_V16QI
106   UNSPEC_INTERLO_V4SI
107   UNSPEC_INTERLO_V8HI
108   UNSPEC_INTERLO_V16QI
109   UNSPEC_LVLX
110   UNSPEC_LVLXL
111   UNSPEC_LVRX
112   UNSPEC_LVRXL
113   UNSPEC_STVLX
114   UNSPEC_STVLXL
115   UNSPEC_STVRX
116   UNSPEC_STVRXL
117   UNSPEC_VSLV
118   UNSPEC_VSRV
119   UNSPEC_VADU
120   UNSPEC_VMULWHUB
121   UNSPEC_VMULWLUB
122   UNSPEC_VMULWHSB
123   UNSPEC_VMULWLSB
124   UNSPEC_VMULWHUH
125   UNSPEC_VMULWLUH
126   UNSPEC_VMULWHSH
127   UNSPEC_VMULWLSH
128   UNSPEC_VUPKHUB
129   UNSPEC_VUPKHUH
130   UNSPEC_VUPKLUB
131   UNSPEC_VUPKLUH
132   UNSPEC_VPERMSI
133   UNSPEC_VPERMHI
134   UNSPEC_INTERHI
135   UNSPEC_INTERLO
136   UNSPEC_VUPKHS_V4SF
137   UNSPEC_VUPKLS_V4SF
138   UNSPEC_VUPKHU_V4SF
139   UNSPEC_VUPKLU_V4SF
140   UNSPEC_VGBBD
141   UNSPEC_VMRGH_DIRECT
142   UNSPEC_VMRGL_DIRECT
143   UNSPEC_VSPLT_DIRECT
144   UNSPEC_VSUMSWS_DIRECT
145   UNSPEC_VADDCUQ
146   UNSPEC_VADDEUQM
147   UNSPEC_VADDECUQ
148   UNSPEC_VSUBCUQ
149   UNSPEC_VSUBEUQM
150   UNSPEC_VSUBECUQ
151   UNSPEC_VBPERMQ
152   UNSPEC_BCDADD
153   UNSPEC_BCDSUB
154   UNSPEC_BCD_OVERFLOW
155])
156
;; Volatile UNSPEC codes: operations with side effects that must not be
;; moved or deleted by the optimizers (VRSAVE/VSCR updates, data-stream
;; stop instructions).
157(define_c_enum "unspecv"
158  [UNSPECV_SET_VRSAVE
159   UNSPECV_MTVSCR
160   UNSPECV_MFVSCR
161   UNSPECV_DSSALL
162   UNSPECV_DSS
163  ])
164
165;; Vec int modes
166(define_mode_iterator VI [V4SI V8HI V16QI])
167;; Like VI, but add ISA 2.07 integer vector ops
168(define_mode_iterator VI2 [V4SI V8HI V16QI V2DI])
169;; Short vec in modes
170(define_mode_iterator VIshort [V8HI V16QI])
171;; Vec float modes
172(define_mode_iterator VF [V4SF])
173;; Vec modes, pity mode iterators are not composable
174(define_mode_iterator V [V4SI V8HI V16QI V4SF])
175;; Vec modes for move/logical/permute ops, include vector types for move not
176;; otherwise handled by altivec (v2df, v2di, ti)
177(define_mode_iterator VM [V4SI
178			  V8HI
179			  V16QI
180			  V4SF
181			  V2DF
182			  V2DI
183			  V1TI
184			  TI
185			  (KF "FLOAT128_VECTOR_P (KFmode)")
186			  (TF "FLOAT128_VECTOR_P (TFmode)")])
187
188;; Like VM, except don't do TImode
189(define_mode_iterator VM2 [V4SI
190			   V8HI
191			   V16QI
192			   V4SF
193			   V2DF
194			   V2DI
195			   V1TI
196			   (KF "FLOAT128_VECTOR_P (KFmode)")
197			   (TF "FLOAT128_VECTOR_P (TFmode)")])
198
199;; Specific iterator for parity which does not have a byte/half-word form, but
200;; does have a quad word form
201(define_mode_iterator VParity [V4SI
202			       V2DI
203			       V1TI
204			       (TI "TARGET_VSX_TIMODE")])
;; Mode attributes: per-mode mnemonic letter (<VI_char>), scalar element
;; mode (<VI_scalar>), and the enabling condition (<VI_unit>) substituted
;; into the patterns below.
205
206(define_mode_attr VI_char [(V2DI "d") (V4SI "w") (V8HI "h") (V16QI "b")])
207(define_mode_attr VI_scalar [(V2DI "DI") (V4SI "SI") (V8HI "HI") (V16QI "QI")])
208(define_mode_attr VI_unit [(V16QI "VECTOR_UNIT_ALTIVEC_P (V16QImode)")
209			   (V8HI "VECTOR_UNIT_ALTIVEC_P (V8HImode)")
210			   (V4SI "VECTOR_UNIT_ALTIVEC_P (V4SImode)")
211			   (V2DI "VECTOR_UNIT_P8_VECTOR_P (V2DImode)")
212			   (V1TI "VECTOR_UNIT_ALTIVEC_P (V1TImode)")])
213
214;; Vector pack/unpack
215(define_mode_iterator VP [V2DI V4SI V8HI])
216(define_mode_attr VP_small [(V2DI "V4SI") (V4SI "V8HI") (V8HI "V16QI")])
217(define_mode_attr VP_small_lc [(V2DI "v4si") (V4SI "v8hi") (V8HI "v16qi")])
218(define_mode_attr VU_char [(V2DI "w") (V4SI "h") (V8HI "b")])
219
220;; Vector negate
221(define_mode_iterator VNEG [V4SI V2DI])
222
223;; Vector move instructions.
;; Alternatives: 0 = store to AltiVec mem, 1 = load from AltiVec mem,
;; 2 = VR->VR copy, 3-5 = GPR/mem moves (emitted as "#" and split later),
;; 6 = easy zero constant, 7 = other easy vector constant, 8 = constant
;; via GPRs.  One of the operands must be a register.
224(define_insn "*altivec_mov<mode>"
225  [(set (match_operand:VM2 0 "nonimmediate_operand" "=Z,v,v,*Y,*r,*r,v,v,*r")
226	(match_operand:VM2 1 "input_operand" "v,Z,v,r,Y,r,j,W,W"))]
227  "VECTOR_MEM_ALTIVEC_P (<MODE>mode)
228   && (register_operand (operands[0], <MODE>mode)
229       || register_operand (operands[1], <MODE>mode))"
230{
231  switch (which_alternative)
232    {
233    case 0: return "stvx %1,%y0";
234    case 1: return "lvx %0,%y1";
235    case 2: return "vor %0,%1,%1";
236    case 3: return "#";
237    case 4: return "#";
238    case 5: return "#";
239    case 6: return "vxor %0,%0,%0";
240    case 7: return output_vec_const_move (operands);
241    case 8: return "#";
242    default: gcc_unreachable ();
243    }
244}
245  [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*,*")
246   (set_attr "length" "4,4,4,20,20,20,4,8,32")])
247
248;; Unlike other altivec moves, allow the GPRs, since a normal use of TImode
249;; is for unions.  However for plain data movement, slightly favor the vector
250;; loads
;; Same alternative layout as *altivec_mov<mode> above, minus the
;; GPR-constant alternative; "?" (rather than "*") on the GPR
;; alternatives keeps them usable but dispreferred.
251(define_insn "*altivec_movti"
252  [(set (match_operand:TI 0 "nonimmediate_operand" "=Z,v,v,?Y,?r,?r,v,v")
253	(match_operand:TI 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
254  "VECTOR_MEM_ALTIVEC_P (TImode)
255   && (register_operand (operands[0], TImode)
256       || register_operand (operands[1], TImode))"
257{
258  switch (which_alternative)
259    {
260    case 0: return "stvx %1,%y0";
261    case 1: return "lvx %0,%y1";
262    case 2: return "vor %0,%1,%1";
263    case 3: return "#";
264    case 4: return "#";
265    case 5: return "#";
266    case 6: return "vxor %0,%0,%0";
267    case 7: return output_vec_const_move (operands);
268    default: gcc_unreachable ();
269    }
270}
271  [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*")])
272
273;; Load up a vector with the most significant bit set by loading up -1 and
274;; doing a shift left
;; Split after reload: build an all-ones vector, then shift each element
;; left by itself (-1 shifted by all-ones = element-size-1 bits), leaving
;; only the sign bit set in every element.  V4SF is punned to V4SI first
;; since the construction is integral.
275(define_split
276  [(set (match_operand:VM 0 "altivec_register_operand" "")
277	(match_operand:VM 1 "easy_vector_constant_msb" ""))]
278  "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
279  [(const_int 0)]
280{
281  rtx dest = operands[0];
282  machine_mode mode = GET_MODE (operands[0]);
283  rtvec v;
284  int i, num_elements;
285
286  if (mode == V4SFmode)
287    {
288      mode = V4SImode;
289      dest = gen_lowpart (V4SImode, dest);
290    }
291
292  num_elements = GET_MODE_NUNITS (mode);
293  v = rtvec_alloc (num_elements);
294  for (i = 0; i < num_elements; i++)
295    RTVEC_ELT (v, i) = constm1_rtx;
296
  /* NOTE(review): gen_vec_initv4si is called for every VM mode, not just
     V4SI -- confirm against upstream whether non-V4SI modes can reach
     this split.  */
297  emit_insn (gen_vec_initv4si (dest, gen_rtx_PARALLEL (mode, v)));
298  emit_insn (gen_rtx_SET (dest, gen_rtx_ASHIFT (mode, dest, dest)));
299  DONE;
300})
301
;; Split "add-self" easy constants after reload: materialize half the
;; desired value with a splat, then double it with a vector add.
302(define_split
303  [(set (match_operand:VM 0 "altivec_register_operand" "")
304	(match_operand:VM 1 "easy_vector_constant_add_self" ""))]
305  "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
306  [(set (match_dup 0) (match_dup 3))
307   (set (match_dup 0) (match_dup 4))]
308{
309  rtx dup = gen_easy_altivec_constant (operands[1]);
310  rtx const_vec;
311  machine_mode op_mode = <MODE>mode;
312
313  /* Divide the operand of the resulting VEC_DUPLICATE, and use
314     simplify_rtx to make a CONST_VECTOR.  */
315  XEXP (dup, 0) = simplify_const_binary_operation (ASHIFTRT, QImode,
316						   XEXP (dup, 0), const1_rtx);
317  const_vec = simplify_rtx (dup);
318
  /* V4SF is handled as V4SI; the halved constant may come back in a
     different (punned) mode, so lowpart it into place if needed.  */
319  if (op_mode == V4SFmode)
320    {
321      op_mode = V4SImode;
322      operands[0] = gen_lowpart (op_mode, operands[0]);
323    }
324  if (GET_MODE (const_vec) == op_mode)
325    operands[3] = const_vec;
326  else
327    operands[3] = gen_lowpart (op_mode, const_vec);
328  operands[4] = gen_rtx_PLUS (op_mode, operands[0], operands[0]);
329})
330
;; Split constants reachable as a vsplt of VAL followed by a vsldoi shift:
;; op2 = splat of VAL, op4 = 0 (left shift) or -1 (right shift fill),
;; then combine with vsldoi by |shift| bytes.
331(define_split
332  [(set (match_operand:VM 0 "altivec_register_operand" "")
333	(match_operand:VM 1 "easy_vector_constant_vsldoi" ""))]
334  "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && can_create_pseudo_p ()"
335  [(set (match_dup 2) (match_dup 3))
336   (set (match_dup 4) (match_dup 5))
337   (set (match_dup 0)
338        (unspec:VM [(match_dup 2)
339		    (match_dup 4)
340		    (match_dup 6)]
341		   UNSPEC_VSLDOI))]
342{
343  rtx op1 = operands[1];
  /* The splatted element is the most significant one in register order.  */
344  int elt = (BYTES_BIG_ENDIAN) ? 0 : GET_MODE_NUNITS (<MODE>mode) - 1;
345  HOST_WIDE_INT val = const_vector_elt_as_int (op1, elt);
346  rtx rtx_val = GEN_INT (val);
347  int shift = vspltis_shifted (op1);
348  int nunits = GET_MODE_NUNITS (<MODE>mode);
349  int i;
350
  /* vspltis_shifted returned non-zero or this predicate would not have
     matched; the sign encodes the shift direction.  */
351  gcc_assert (shift != 0);
352  operands[2] = gen_reg_rtx (<MODE>mode);
353  operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, rtvec_alloc (nunits));
354  operands[4] = gen_reg_rtx (<MODE>mode);
355
356  if (shift < 0)
357    {
358      operands[5] = CONSTM1_RTX (<MODE>mode);
359      operands[6] = GEN_INT (-shift);
360    }
361  else
362    {
363      operands[5] = CONST0_RTX (<MODE>mode);
364      operands[6] = GEN_INT (shift);
365    }
366
367  /* Populate the constant vectors.  */
368  for (i = 0; i < nunits; i++)
369    XVECEXP (operands[3], 0, i) = rtx_val;
370})
371
;; Read the VRSAVE special register (hard reg 109) into a GPR.  Darwin's
;; assembler wants the raw SPR number, other targets accept the mnemonic.
372(define_insn "get_vrsave_internal"
373  [(set (match_operand:SI 0 "register_operand" "=r")
374	(unspec:SI [(reg:SI 109)] UNSPEC_GET_VRSAVE))]
375  "TARGET_ALTIVEC"
376{
377  if (TARGET_MACHO)
378     return "mfspr %0,256";
379  else
380     return "mfvrsave %0";
381}
382  [(set_attr "type" "*")])
383
;; Write VRSAVE (hard reg 109) from a GPR; unspec_volatile so the write
;; is never deleted or moved.  Matched as part of a vrsave_operation
;; parallel built by the prologue/epilogue code.
384(define_insn "*set_vrsave_internal"
385  [(match_parallel 0 "vrsave_operation"
386     [(set (reg:SI 109)
387	   (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r")
388				(reg:SI 109)] UNSPECV_SET_VRSAVE))])]
389  "TARGET_ALTIVEC"
390{
391  if (TARGET_MACHO)
392    return "mtspr 256,%1";
393  else
394    return "mtvrsave %1";
395}
396  [(set_attr "type" "*")])
397
;; Darwin 32-bit only: call the out-of-line "save the world" routine
;; (saves all call-saved registers); clobbers LR (reg 65).
398(define_insn "*save_world"
399 [(match_parallel 0 "save_world_operation"
400                  [(clobber (reg:SI 65))
401                   (use (match_operand:SI 1 "call_operand" "s"))])]
402 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
403 "bl %z1"
404  [(set_attr "type" "branch")
405   (set_attr "length" "4")])
406
;; Darwin 32-bit only: tail-branch to the out-of-line "restore the world"
;; routine, which also performs the function return.
407(define_insn "*restore_world"
408 [(match_parallel 0 "restore_world_operation"
409                  [(return)
410		   (use (reg:SI 65))
411                   (use (match_operand:SI 1 "call_operand" "s"))
412                   (clobber (match_operand:SI 2 "gpc_reg_operand" "=r"))])]
413 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
414 "b %z1")
415
416;; The save_vregs and restore_vregs patterns don't use memory_operand
417;; because (plus (reg) (const_int)) is not a valid vector address.
418;; This way is more compact than describing exactly what happens in
419;; the out-of-line functions, ie. loading the constant into r11/r12
420;; then using indexed addressing, and requires less editing of rtl
421;; to describe the operation to dwarf2out_frame_debug_expr.
;; Call the out-of-line vector-save routine; r11 variant clobbers r11.
422(define_insn "*save_vregs_<mode>_r11"
423  [(match_parallel 0 "any_parallel_operand"
424     [(clobber (reg:P 65))
425      (use (match_operand:P 1 "symbol_ref_operand" "s"))
426      (clobber (reg:P 11))
427      (use (reg:P 0))
428      (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
429			     (match_operand:P 3 "short_cint_operand" "I")))
430	   (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
431  "TARGET_ALTIVEC"
432  "bl %1"
433  [(set_attr "type" "branch")
434   (set_attr "length" "4")])
435
;; Identical to the r11 form except the out-of-line routine uses r12.
436(define_insn "*save_vregs_<mode>_r12"
437  [(match_parallel 0 "any_parallel_operand"
438     [(clobber (reg:P 65))
439      (use (match_operand:P 1 "symbol_ref_operand" "s"))
440      (clobber (reg:P 12))
441      (use (reg:P 0))
442      (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
443			     (match_operand:P 3 "short_cint_operand" "I")))
444	   (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
445  "TARGET_ALTIVEC"
446  "bl %1"
447  [(set_attr "type" "branch")
448   (set_attr "length" "4")])
449
;; Call the out-of-line vector-restore routine; mirror images of the
;; save patterns above (vector register is the destination).
450(define_insn "*restore_vregs_<mode>_r11"
451  [(match_parallel 0 "any_parallel_operand"
452     [(clobber (reg:P 65))
453      (use (match_operand:P 1 "symbol_ref_operand" "s"))
454      (clobber (reg:P 11))
455      (use (reg:P 0))
456      (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
457	   (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
458			     (match_operand:P 4 "short_cint_operand" "I"))))])]
459  "TARGET_ALTIVEC"
460  "bl %1"
461  [(set_attr "type" "branch")
462   (set_attr "length" "4")])
463
;; Identical to the r11 form except the out-of-line routine uses r12.
464(define_insn "*restore_vregs_<mode>_r12"
465  [(match_parallel 0 "any_parallel_operand"
466     [(clobber (reg:P 65))
467      (use (match_operand:P 1 "symbol_ref_operand" "s"))
468      (clobber (reg:P 12))
469      (use (reg:P 0))
470      (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
471	   (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
472			     (match_operand:P 4 "short_cint_operand" "I"))))])]
473  "TARGET_ALTIVEC"
474  "bl %1"
475  [(set_attr "type" "branch")
476   (set_attr "length" "4")])
477
478;; Simple binary operations.
479
480;; add
;; Modulo (wrapping) vector add, one pattern per element width via VI2.
481(define_insn "add<mode>3"
482  [(set (match_operand:VI2 0 "register_operand" "=v")
483        (plus:VI2 (match_operand:VI2 1 "register_operand" "v")
484		  (match_operand:VI2 2 "register_operand" "v")))]
485  "<VI_unit>"
486  "vaddu<VI_char>m %0,%1,%2"
487  [(set_attr "type" "vecsimple")])
488
;; Single-precision float add.
489(define_insn "*altivec_addv4sf3"
490  [(set (match_operand:V4SF 0 "register_operand" "=v")
491        (plus:V4SF (match_operand:V4SF 1 "register_operand" "v")
492		   (match_operand:V4SF 2 "register_operand" "v")))]
493  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
494  "vaddfp %0,%1,%2"
495  [(set_attr "type" "vecfloat")])
496
;; vaddcuw: per-word carry-out of an unsigned add (unspec, no RTL form).
497(define_insn "altivec_vaddcuw"
498  [(set (match_operand:V4SI 0 "register_operand" "=v")
499        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
500                      (match_operand:V4SI 2 "register_operand" "v")]
501		     UNSPEC_VADDCUW))]
502  "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
503  "vaddcuw %0,%1,%2"
504  [(set_attr "type" "vecsimple")])
505
;; Unsigned saturating add; also sets SAT in VSCR (modelled as reg 110).
506(define_insn "altivec_vaddu<VI_char>s"
507  [(set (match_operand:VI 0 "register_operand" "=v")
508        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
509		    (match_operand:VI 2 "register_operand" "v")]
510		   UNSPEC_VADDU))
511   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
512  "<VI_unit>"
513  "vaddu<VI_char>s %0,%1,%2"
514  [(set_attr "type" "vecsimple")])
515
;; Signed saturating add; also sets SAT in VSCR (modelled as reg 110).
516(define_insn "altivec_vadds<VI_char>s"
517  [(set (match_operand:VI 0 "register_operand" "=v")
518        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
519                    (match_operand:VI 2 "register_operand" "v")]
520		   UNSPEC_VADDS))
521   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
522  "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
523  "vadds<VI_char>s %0,%1,%2"
524  [(set_attr "type" "vecsimple")])
525
526;; sub
;; Modulo (wrapping) vector subtract, one pattern per element width.
527(define_insn "sub<mode>3"
528  [(set (match_operand:VI2 0 "register_operand" "=v")
529        (minus:VI2 (match_operand:VI2 1 "register_operand" "v")
530		   (match_operand:VI2 2 "register_operand" "v")))]
531  "<VI_unit>"
532  "vsubu<VI_char>m %0,%1,%2"
533  [(set_attr "type" "vecsimple")])
534
;; Single-precision float subtract.
535(define_insn "*altivec_subv4sf3"
536  [(set (match_operand:V4SF 0 "register_operand" "=v")
537        (minus:V4SF (match_operand:V4SF 1 "register_operand" "v")
538                    (match_operand:V4SF 2 "register_operand" "v")))]
539  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
540  "vsubfp %0,%1,%2"
541  [(set_attr "type" "vecfloat")])
542
;; vsubcuw: per-word borrow indication of an unsigned subtract (unspec).
543(define_insn "altivec_vsubcuw"
544  [(set (match_operand:V4SI 0 "register_operand" "=v")
545        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
546                      (match_operand:V4SI 2 "register_operand" "v")]
547		     UNSPEC_VSUBCUW))]
548  "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
549  "vsubcuw %0,%1,%2"
550  [(set_attr "type" "vecsimple")])
551
;; Unsigned saturating subtract; also sets SAT in VSCR (reg 110).
552(define_insn "altivec_vsubu<VI_char>s"
553  [(set (match_operand:VI 0 "register_operand" "=v")
554        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
555                    (match_operand:VI 2 "register_operand" "v")]
556		   UNSPEC_VSUBU))
557   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
558  "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
559  "vsubu<VI_char>s %0,%1,%2"
560  [(set_attr "type" "vecsimple")])
561
;; Signed saturating subtract; also sets SAT in VSCR (reg 110).
562(define_insn "altivec_vsubs<VI_char>s"
563  [(set (match_operand:VI 0 "register_operand" "=v")
564        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
565                    (match_operand:VI 2 "register_operand" "v")]
566		   UNSPEC_VSUBS))
567   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
568  "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
569  "vsubs<VI_char>s %0,%1,%2"
570  [(set_attr "type" "vecsimple")])
571
572;;
;; Vector average (unsigned / signed), one pattern per element width.
573(define_insn "altivec_vavgu<VI_char>"
574  [(set (match_operand:VI 0 "register_operand" "=v")
575        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
576                    (match_operand:VI 2 "register_operand" "v")]
577		   UNSPEC_VAVGU))]
578  "TARGET_ALTIVEC"
579  "vavgu<VI_char> %0,%1,%2"
580  [(set_attr "type" "vecsimple")])
581
582(define_insn "altivec_vavgs<VI_char>"
583  [(set (match_operand:VI 0 "register_operand" "=v")
584        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
585                    (match_operand:VI 2 "register_operand" "v")]
586		   UNSPEC_VAVGS))]
587  "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
588  "vavgs<VI_char> %0,%1,%2"
589  [(set_attr "type" "vecsimple")])
590
;; vcmpbfp: float bounds compare; result is an unspec V4SI mask.
591(define_insn "altivec_vcmpbfp"
592  [(set (match_operand:V4SI 0 "register_operand" "=v")
593        (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
594                      (match_operand:V4SF 2 "register_operand" "v")]
595                      UNSPEC_VCMPBFP))]
596  "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
597  "vcmpbfp %0,%1,%2"
598  [(set_attr "type" "veccmp")])
599
;; Integer element-wise compares; the result is an all-ones/all-zeros
;; mask per element, expressed with the native RTL comparison codes.
600(define_insn "*altivec_eq<mode>"
601  [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
602	(eq:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
603		(match_operand:VI2 2 "altivec_register_operand" "v")))]
604  "<VI_unit>"
605  "vcmpequ<VI_char> %0,%1,%2"
606  [(set_attr "type" "veccmpfx")])
607
608(define_insn "*altivec_gt<mode>"
609  [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
610	(gt:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
611		(match_operand:VI2 2 "altivec_register_operand" "v")))]
612  "<VI_unit>"
613  "vcmpgts<VI_char> %0,%1,%2"
614  [(set_attr "type" "veccmpfx")])
615
616(define_insn "*altivec_gtu<mode>"
617  [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
618	(gtu:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
619		 (match_operand:VI2 2 "altivec_register_operand" "v")))]
620  "<VI_unit>"
621  "vcmpgtu<VI_char> %0,%1,%2"
622  [(set_attr "type" "veccmpfx")])
623
;; Float element-wise compares (eq/gt/ge), same masking convention.
624(define_insn "*altivec_eqv4sf"
625  [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
626	(eq:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
627		 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
628  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
629  "vcmpeqfp %0,%1,%2"
630  [(set_attr "type" "veccmp")])
631
632(define_insn "*altivec_gtv4sf"
633  [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
634	(gt:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
635		 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
636  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
637  "vcmpgtfp %0,%1,%2"
638  [(set_attr "type" "veccmp")])
639
640(define_insn "*altivec_gev4sf"
641  [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
642	(ge:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
643		 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
644  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
645  "vcmpgefp %0,%1,%2"
646  [(set_attr "type" "veccmp")])
647
;; Bitwise select: where the mask (op1) is set take op2, else op3.
;; Note the operand order in the vsel mnemonic is reversed relative to
;; the if_then_else arms (%3 first).  Signed- and unsigned-compare mask
;; variants differ only in the CC mode of the NE test.
648(define_insn "*altivec_vsel<mode>"
649  [(set (match_operand:VM 0 "altivec_register_operand" "=v")
650	(if_then_else:VM
651	 (ne:CC (match_operand:VM 1 "altivec_register_operand" "v")
652		(match_operand:VM 4 "zero_constant" ""))
653	 (match_operand:VM 2 "altivec_register_operand" "v")
654	 (match_operand:VM 3 "altivec_register_operand" "v")))]
655  "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
656  "vsel %0,%3,%2,%1"
657  [(set_attr "type" "vecmove")])
658
659(define_insn "*altivec_vsel<mode>_uns"
660  [(set (match_operand:VM 0 "altivec_register_operand" "=v")
661	(if_then_else:VM
662	 (ne:CCUNS (match_operand:VM 1 "altivec_register_operand" "v")
663		   (match_operand:VM 4 "zero_constant" ""))
664	 (match_operand:VM 2 "altivec_register_operand" "v")
665	 (match_operand:VM 3 "altivec_register_operand" "v")))]
666  "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
667  "vsel %0,%3,%2,%1"
668  [(set_attr "type" "vecmove")])
669
670;; Fused multiply add.
671
;; vmaddfp: op0 = op1 * op2 + op3, single rounding.
672(define_insn "*altivec_fmav4sf4"
673  [(set (match_operand:V4SF 0 "register_operand" "=v")
674	(fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
675		  (match_operand:V4SF 2 "register_operand" "v")
676		  (match_operand:V4SF 3 "register_operand" "v")))]
677  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
678  "vmaddfp %0,%1,%2,%3"
679  [(set_attr "type" "vecfloat")])
680
681;; We do multiply as a fused multiply-add with an add of a -0.0 vector.
682
;; -0.0 (not +0.0) is the addend so that signed zeros are preserved:
;; the splat/shift sequence below puts only the sign bit in each word.
683(define_expand "altivec_mulv4sf3"
684  [(set (match_operand:V4SF 0 "register_operand" "")
685	(fma:V4SF (match_operand:V4SF 1 "register_operand" "")
686		  (match_operand:V4SF 2 "register_operand" "")
687		  (match_dup 3)))]
688  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
689{
690  rtx neg0;
691
692  /* Generate [-0.0, -0.0, -0.0, -0.0].  */
693  neg0 = gen_reg_rtx (V4SImode);
694  emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
695  emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
696
697  operands[3] = gen_lowpart (V4SFmode, neg0);
698})
699
700;; 32-bit integer multiplication
701;; A_high = Operand_0 & 0xFFFF0000 >> 16
702;; A_low = Operand_0 & 0xFFFF
703;; B_high = Operand_1 & 0xFFFF0000 >> 16
704;; B_low = Operand_1 & 0xFFFF
705;; result = A_low * B_low + (A_high * B_low + B_high * A_low) << 16
706
707;; (define_insn "mulv4si3"
708;;   [(set (match_operand:V4SI 0 "register_operand" "=v")
709;;         (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
710;;                    (match_operand:V4SI 2 "register_operand" "v")))]
;; ISA 2.07 (POWER8) has a single-instruction modulo word multiply.
711(define_insn "mulv4si3_p8"
712  [(set (match_operand:V4SI 0 "register_operand" "=v")
713        (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
714                   (match_operand:V4SI 2 "register_operand" "v")))]
715  "TARGET_P8_VECTOR"
716  "vmuluwm %0,%1,%2"
717  [(set_attr "type" "veccomplex")])
718
;; Expand a V4SI multiply.  On POWER8 emit vmuluwm directly; otherwise
;; synthesize it from 16-bit multiplies per the algorithm commented
;; above: low halves via vmulouh, cross products via vmsumuhm on a
;; half-rotated operand, shifted left 16 and added in.
719(define_expand "mulv4si3"
720  [(use (match_operand:V4SI 0 "register_operand" ""))
721   (use (match_operand:V4SI 1 "register_operand" ""))
722   (use (match_operand:V4SI 2 "register_operand" ""))]
723   "TARGET_ALTIVEC"
724{
725  rtx zero;
726  rtx swap;
727  rtx small_swap;
728  rtx sixteen;
729  rtx one;
730  rtx two;
731  rtx low_product;
732  rtx high_product;
733
734  if (TARGET_P8_VECTOR)
735    {
736      emit_insn (gen_mulv4si3_p8 (operands[0], operands[1], operands[2]));
737      DONE;
738    }
739
740  zero = gen_reg_rtx (V4SImode);
741  emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
742
  /* vspltisw's immediate is 5-bit signed; -16 rotates by 16 when used
     as a (mod 32) rotate count.  CONST_INTs are always VOIDmode, so use
     GEN_INT rather than gen_rtx_CONST_INT with a vector mode.  */
743  sixteen = gen_reg_rtx (V4SImode);
744  emit_insn (gen_altivec_vspltisw (sixteen, GEN_INT (-16)));
745
746  swap = gen_reg_rtx (V4SImode);
747  emit_insn (gen_vrotlv4si3 (swap, operands[2], sixteen));
748
749  one = gen_reg_rtx (V8HImode);
750  convert_move (one, operands[1], 0);
751
752  two = gen_reg_rtx (V8HImode);
753  convert_move (two, operands[2], 0);
754
755  small_swap = gen_reg_rtx (V8HImode);
756  convert_move (small_swap, swap, 0);
757
758  low_product = gen_reg_rtx (V4SImode);
759  emit_insn (gen_altivec_vmulouh (low_product, one, two));
760
761  high_product = gen_reg_rtx (V4SImode);
762  emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero));
763
764  emit_insn (gen_vashlv4si3 (high_product, high_product, sixteen));
765
766  emit_insn (gen_addv4si3 (operands[0], high_product, low_product));
767
768  DONE;
769})
770
;; Expand V8HI multiply as multiply-low-and-add with a zero addend.
771(define_expand "mulv8hi3"
772  [(use (match_operand:V8HI 0 "register_operand" ""))
773   (use (match_operand:V8HI 1 "register_operand" ""))
774   (use (match_operand:V8HI 2 "register_operand" ""))]
775   "TARGET_ALTIVEC"
776{
777  rtx zero = gen_reg_rtx (V8HImode);
778
779  emit_insn (gen_altivec_vspltish (zero, const0_rtx));
780  emit_insn (gen_altivec_vmladduhm(operands[0], operands[1], operands[2], zero));
781
782  DONE;
783})
784
785;; Fused multiply subtract
;; vnmsubfp: op0 = -(op1 * op2 - op3), written as neg(fma(a, b, -c)).
786(define_insn "*altivec_vnmsubfp"
787  [(set (match_operand:V4SF 0 "register_operand" "=v")
788	(neg:V4SF
789	 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
790		   (match_operand:V4SF 2 "register_operand" "v")
791		   (neg:V4SF
792		    (match_operand:V4SF 3 "register_operand" "v")))))]
793  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
794  "vnmsubfp %0,%1,%2,%3"
795  [(set_attr "type" "vecfloat")])
796
;; Multiply-sum family: narrow-element products of op1*op2 are summed
;; into words together with accumulator op3.  The "s"-suffixed forms
;; saturate and set SAT in VSCR (modelled as reg 110).
797(define_insn "altivec_vmsumu<VI_char>m"
798  [(set (match_operand:V4SI 0 "register_operand" "=v")
799        (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
800		      (match_operand:VIshort 2 "register_operand" "v")
801                      (match_operand:V4SI 3 "register_operand" "v")]
802		     UNSPEC_VMSUMU))]
803  "TARGET_ALTIVEC"
804  "vmsumu<VI_char>m %0,%1,%2,%3"
805  [(set_attr "type" "veccomplex")])
806
807(define_insn "altivec_vmsumm<VI_char>m"
808  [(set (match_operand:V4SI 0 "register_operand" "=v")
809        (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
810		      (match_operand:VIshort 2 "register_operand" "v")
811                      (match_operand:V4SI 3 "register_operand" "v")]
812		     UNSPEC_VMSUMM))]
813  "TARGET_ALTIVEC"
814  "vmsumm<VI_char>m %0,%1,%2,%3"
815  [(set_attr "type" "veccomplex")])
816
817(define_insn "altivec_vmsumshm"
818  [(set (match_operand:V4SI 0 "register_operand" "=v")
819        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
820		      (match_operand:V8HI 2 "register_operand" "v")
821                      (match_operand:V4SI 3 "register_operand" "v")]
822		     UNSPEC_VMSUMSHM))]
823  "TARGET_ALTIVEC"
824  "vmsumshm %0,%1,%2,%3"
825  [(set_attr "type" "veccomplex")])
826
827(define_insn "altivec_vmsumuhs"
828  [(set (match_operand:V4SI 0 "register_operand" "=v")
829        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
830		      (match_operand:V8HI 2 "register_operand" "v")
831                      (match_operand:V4SI 3 "register_operand" "v")]
832		     UNSPEC_VMSUMUHS))
833   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
834  "TARGET_ALTIVEC"
835  "vmsumuhs %0,%1,%2,%3"
836  [(set_attr "type" "veccomplex")])
837
838(define_insn "altivec_vmsumshs"
839  [(set (match_operand:V4SI 0 "register_operand" "=v")
840        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
841		      (match_operand:V8HI 2 "register_operand" "v")
842                      (match_operand:V4SI 3 "register_operand" "v")]
843		     UNSPEC_VMSUMSHS))
844   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
845  "TARGET_ALTIVEC"
846  "vmsumshs %0,%1,%2,%3"
847  [(set_attr "type" "veccomplex")])
848
849;; max
850
;; Element-wise max/min, unsigned and signed, via native RTL codes.
851(define_insn "umax<mode>3"
852  [(set (match_operand:VI2 0 "register_operand" "=v")
853        (umax:VI2 (match_operand:VI2 1 "register_operand" "v")
854		  (match_operand:VI2 2 "register_operand" "v")))]
855  "<VI_unit>"
856  "vmaxu<VI_char> %0,%1,%2"
857  [(set_attr "type" "vecsimple")])
858
859(define_insn "smax<mode>3"
860  [(set (match_operand:VI2 0 "register_operand" "=v")
861        (smax:VI2 (match_operand:VI2 1 "register_operand" "v")
862		  (match_operand:VI2 2 "register_operand" "v")))]
863  "<VI_unit>"
864  "vmaxs<VI_char> %0,%1,%2"
865  [(set_attr "type" "vecsimple")])
866
867(define_insn "*altivec_smaxv4sf3"
868  [(set (match_operand:V4SF 0 "register_operand" "=v")
869        (smax:V4SF (match_operand:V4SF 1 "register_operand" "v")
870                   (match_operand:V4SF 2 "register_operand" "v")))]
871  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
872  "vmaxfp %0,%1,%2"
873  [(set_attr "type" "veccmp")])
874
875(define_insn "umin<mode>3"
876  [(set (match_operand:VI2 0 "register_operand" "=v")
877        (umin:VI2 (match_operand:VI2 1 "register_operand" "v")
878		  (match_operand:VI2 2 "register_operand" "v")))]
879  "<VI_unit>"
880  "vminu<VI_char> %0,%1,%2"
881  [(set_attr "type" "vecsimple")])
882
883(define_insn "smin<mode>3"
884  [(set (match_operand:VI2 0 "register_operand" "=v")
885        (smin:VI2 (match_operand:VI2 1 "register_operand" "v")
886		  (match_operand:VI2 2 "register_operand" "v")))]
887  "<VI_unit>"
888  "vmins<VI_char> %0,%1,%2"
889  [(set_attr "type" "vecsimple")])
890
891(define_insn "*altivec_sminv4sf3"
892  [(set (match_operand:V4SF 0 "register_operand" "=v")
893        (smin:V4SF (match_operand:V4SF 1 "register_operand" "v")
894                   (match_operand:V4SF 2 "register_operand" "v")))]
895  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
896  "vminfp %0,%1,%2"
897  [(set_attr "type" "veccmp")])
898
;; Halfword multiply-high-and-add variants; the saturating forms set
;; SAT in VSCR (modelled as reg 110).
899(define_insn "altivec_vmhaddshs"
900  [(set (match_operand:V8HI 0 "register_operand" "=v")
901        (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
902		      (match_operand:V8HI 2 "register_operand" "v")
903                      (match_operand:V8HI 3 "register_operand" "v")]
904		     UNSPEC_VMHADDSHS))
905   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
906  "TARGET_ALTIVEC"
907  "vmhaddshs %0,%1,%2,%3"
908  [(set_attr "type" "veccomplex")])
909
910(define_insn "altivec_vmhraddshs"
911  [(set (match_operand:V8HI 0 "register_operand" "=v")
912        (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
913		      (match_operand:V8HI 2 "register_operand" "v")
914                      (match_operand:V8HI 3 "register_operand" "v")]
915		     UNSPEC_VMHRADDSHS))
916   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
917  "TARGET_ALTIVEC"
918  "vmhraddshs %0,%1,%2,%3"
919  [(set_attr "type" "veccomplex")])
920
;; vmladduhm is a plain modulo multiply-low-add, so it has a native
;; RTL form (plus (mult ...) ...) instead of an unspec.
921(define_insn "altivec_vmladduhm"
922  [(set (match_operand:V8HI 0 "register_operand" "=v")
923        (plus:V8HI (mult:V8HI (match_operand:V8HI 1 "register_operand" "v")
924		   	      (match_operand:V8HI 2 "register_operand" "v"))
925		   (match_operand:V8HI 3 "register_operand" "v")))]
926  "TARGET_ALTIVEC"
927  "vmladduhm %0,%1,%2,%3"
928  [(set_attr "type" "veccomplex")])
929
930(define_expand "altivec_vmrghb"
931  [(use (match_operand:V16QI 0 "register_operand" ""))
932   (use (match_operand:V16QI 1 "register_operand" ""))
933   (use (match_operand:V16QI 2 "register_operand" ""))]
934  "TARGET_ALTIVEC"
935{
936  rtvec v;
937  rtx x;
938
939  /* Special handling for LE with -maltivec=be.  */
940  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
941    {
942      v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
943                     GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
944		     GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
945		     GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
946      x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
947    }
948  else
949    {
950      v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
951                     GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
952		     GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
953		     GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
954      x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
955    }
956
957  x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
958  emit_insn (gen_rtx_SET (operands[0], x));
959  DONE;
960})
961
962(define_insn "*altivec_vmrghb_internal"
963  [(set (match_operand:V16QI 0 "register_operand" "=v")
964        (vec_select:V16QI
965	  (vec_concat:V32QI
966	    (match_operand:V16QI 1 "register_operand" "v")
967	    (match_operand:V16QI 2 "register_operand" "v"))
968	  (parallel [(const_int 0) (const_int 16)
969		     (const_int 1) (const_int 17)
970		     (const_int 2) (const_int 18)
971		     (const_int 3) (const_int 19)
972		     (const_int 4) (const_int 20)
973		     (const_int 5) (const_int 21)
974		     (const_int 6) (const_int 22)
975		     (const_int 7) (const_int 23)])))]
976  "TARGET_ALTIVEC"
977{
978  if (BYTES_BIG_ENDIAN)
979    return "vmrghb %0,%1,%2";
980  else
981    return "vmrglb %0,%2,%1";
982}
983  [(set_attr "type" "vecperm")])
984
985(define_insn "altivec_vmrghb_direct"
986  [(set (match_operand:V16QI 0 "register_operand" "=v")
987        (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
988                       (match_operand:V16QI 2 "register_operand" "v")]
989		      UNSPEC_VMRGH_DIRECT))]
990  "TARGET_ALTIVEC"
991  "vmrghb %0,%1,%2"
992  [(set_attr "type" "vecperm")])
993
994(define_expand "altivec_vmrghh"
995  [(use (match_operand:V8HI 0 "register_operand" ""))
996   (use (match_operand:V8HI 1 "register_operand" ""))
997   (use (match_operand:V8HI 2 "register_operand" ""))]
998  "TARGET_ALTIVEC"
999{
1000  rtvec v;
1001  rtx x;
1002
1003  /* Special handling for LE with -maltivec=be.  */
1004  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1005    {
1006      v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
1007                     GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
1008      x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
1009    }
1010  else
1011    {
1012      v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
1013                     GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
1014      x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1015    }
1016
1017  x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1018  emit_insn (gen_rtx_SET (operands[0], x));
1019  DONE;
1020})
1021
1022(define_insn "*altivec_vmrghh_internal"
1023  [(set (match_operand:V8HI 0 "register_operand" "=v")
1024        (vec_select:V8HI
1025	  (vec_concat:V16HI
1026	    (match_operand:V8HI 1 "register_operand" "v")
1027	    (match_operand:V8HI 2 "register_operand" "v"))
1028	  (parallel [(const_int 0) (const_int 8)
1029		     (const_int 1) (const_int 9)
1030		     (const_int 2) (const_int 10)
1031		     (const_int 3) (const_int 11)])))]
1032  "TARGET_ALTIVEC"
1033{
1034  if (BYTES_BIG_ENDIAN)
1035    return "vmrghh %0,%1,%2";
1036  else
1037    return "vmrglh %0,%2,%1";
1038}
1039  [(set_attr "type" "vecperm")])
1040
1041(define_insn "altivec_vmrghh_direct"
1042  [(set (match_operand:V8HI 0 "register_operand" "=v")
1043        (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1044                      (match_operand:V8HI 2 "register_operand" "v")]
1045                     UNSPEC_VMRGH_DIRECT))]
1046  "TARGET_ALTIVEC"
1047  "vmrghh %0,%1,%2"
1048  [(set_attr "type" "vecperm")])
1049
1050(define_expand "altivec_vmrghw"
1051  [(use (match_operand:V4SI 0 "register_operand" ""))
1052   (use (match_operand:V4SI 1 "register_operand" ""))
1053   (use (match_operand:V4SI 2 "register_operand" ""))]
1054  "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1055{
1056  rtvec v;
1057  rtx x;
1058
1059  /* Special handling for LE with -maltivec=be.  */
1060  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1061    {
1062      v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1063      x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
1064    }
1065  else
1066    {
1067      v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1068      x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1069    }
1070
1071  x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1072  emit_insn (gen_rtx_SET (operands[0], x));
1073  DONE;
1074})
1075
1076(define_insn "*altivec_vmrghw_internal"
1077  [(set (match_operand:V4SI 0 "register_operand" "=v")
1078        (vec_select:V4SI
1079	  (vec_concat:V8SI
1080	    (match_operand:V4SI 1 "register_operand" "v")
1081	    (match_operand:V4SI 2 "register_operand" "v"))
1082	  (parallel [(const_int 0) (const_int 4)
1083		     (const_int 1) (const_int 5)])))]
1084  "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1085{
1086  if (BYTES_BIG_ENDIAN)
1087    return "vmrghw %0,%1,%2";
1088  else
1089    return "vmrglw %0,%2,%1";
1090}
1091  [(set_attr "type" "vecperm")])
1092
1093(define_insn "altivec_vmrghw_direct"
1094  [(set (match_operand:V4SI 0 "register_operand" "=v")
1095        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1096                      (match_operand:V4SI 2 "register_operand" "v")]
1097                     UNSPEC_VMRGH_DIRECT))]
1098  "TARGET_ALTIVEC"
1099  "vmrghw %0,%1,%2"
1100  [(set_attr "type" "vecperm")])
1101
1102(define_insn "*altivec_vmrghsf"
1103  [(set (match_operand:V4SF 0 "register_operand" "=v")
1104        (vec_select:V4SF
1105	  (vec_concat:V8SF
1106	    (match_operand:V4SF 1 "register_operand" "v")
1107	    (match_operand:V4SF 2 "register_operand" "v"))
1108	  (parallel [(const_int 0) (const_int 4)
1109		     (const_int 1) (const_int 5)])))]
1110  "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1111{
1112  if (BYTES_BIG_ENDIAN)
1113    return "vmrghw %0,%1,%2";
1114  else
1115    return "vmrglw %0,%2,%1";
1116}
1117  [(set_attr "type" "vecperm")])
1118
1119(define_expand "altivec_vmrglb"
1120  [(use (match_operand:V16QI 0 "register_operand" ""))
1121   (use (match_operand:V16QI 1 "register_operand" ""))
1122   (use (match_operand:V16QI 2 "register_operand" ""))]
1123  "TARGET_ALTIVEC"
1124{
1125  rtvec v;
1126  rtx x;
1127
1128  /* Special handling for LE with -maltivec=be.  */
1129  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1130    {
1131      v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
1132                     GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
1133		     GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
1134		     GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
1135      x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
1136    }
1137  else
1138    {
1139      v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
1140                     GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
1141		     GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
1142		     GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
1143      x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
1144    }
1145
1146  x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1147  emit_insn (gen_rtx_SET (operands[0], x));
1148  DONE;
1149})
1150
1151(define_insn "*altivec_vmrglb_internal"
1152  [(set (match_operand:V16QI 0 "register_operand" "=v")
1153        (vec_select:V16QI
1154	  (vec_concat:V32QI
1155	    (match_operand:V16QI 1 "register_operand" "v")
1156	    (match_operand:V16QI 2 "register_operand" "v"))
1157	  (parallel [(const_int  8) (const_int 24)
1158		     (const_int  9) (const_int 25)
1159		     (const_int 10) (const_int 26)
1160		     (const_int 11) (const_int 27)
1161		     (const_int 12) (const_int 28)
1162		     (const_int 13) (const_int 29)
1163		     (const_int 14) (const_int 30)
1164		     (const_int 15) (const_int 31)])))]
1165  "TARGET_ALTIVEC"
1166{
1167  if (BYTES_BIG_ENDIAN)
1168    return "vmrglb %0,%1,%2";
1169  else
1170    return "vmrghb %0,%2,%1";
1171}
1172  [(set_attr "type" "vecperm")])
1173
1174(define_insn "altivec_vmrglb_direct"
1175  [(set (match_operand:V16QI 0 "register_operand" "=v")
1176        (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1177    		       (match_operand:V16QI 2 "register_operand" "v")]
1178                      UNSPEC_VMRGL_DIRECT))]
1179  "TARGET_ALTIVEC"
1180  "vmrglb %0,%1,%2"
1181  [(set_attr "type" "vecperm")])
1182
1183(define_expand "altivec_vmrglh"
1184  [(use (match_operand:V8HI 0 "register_operand" ""))
1185   (use (match_operand:V8HI 1 "register_operand" ""))
1186   (use (match_operand:V8HI 2 "register_operand" ""))]
1187  "TARGET_ALTIVEC"
1188{
1189  rtvec v;
1190  rtx x;
1191
1192  /* Special handling for LE with -maltivec=be.  */
1193  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1194    {
1195      v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
1196                     GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
1197      x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
1198    }
1199  else
1200    {
1201      v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
1202                     GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
1203      x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1204    }
1205
1206  x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1207  emit_insn (gen_rtx_SET (operands[0], x));
1208  DONE;
1209})
1210
1211(define_insn "*altivec_vmrglh_internal"
1212  [(set (match_operand:V8HI 0 "register_operand" "=v")
1213        (vec_select:V8HI
1214	  (vec_concat:V16HI
1215	    (match_operand:V8HI 1 "register_operand" "v")
1216	    (match_operand:V8HI 2 "register_operand" "v"))
1217	  (parallel [(const_int 4) (const_int 12)
1218		     (const_int 5) (const_int 13)
1219		     (const_int 6) (const_int 14)
1220		     (const_int 7) (const_int 15)])))]
1221  "TARGET_ALTIVEC"
1222{
1223  if (BYTES_BIG_ENDIAN)
1224    return "vmrglh %0,%1,%2";
1225  else
1226    return "vmrghh %0,%2,%1";
1227}
1228  [(set_attr "type" "vecperm")])
1229
1230(define_insn "altivec_vmrglh_direct"
1231  [(set (match_operand:V8HI 0 "register_operand" "=v")
1232        (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1233		      (match_operand:V8HI 2 "register_operand" "v")]
1234                     UNSPEC_VMRGL_DIRECT))]
1235  "TARGET_ALTIVEC"
1236  "vmrglh %0,%1,%2"
1237  [(set_attr "type" "vecperm")])
1238
1239(define_expand "altivec_vmrglw"
1240  [(use (match_operand:V4SI 0 "register_operand" ""))
1241   (use (match_operand:V4SI 1 "register_operand" ""))
1242   (use (match_operand:V4SI 2 "register_operand" ""))]
1243  "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1244{
1245  rtvec v;
1246  rtx x;
1247
1248  /* Special handling for LE with -maltivec=be.  */
1249  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1250    {
1251      v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1252      x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
1253    }
1254  else
1255    {
1256      v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1257      x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1258    }
1259
1260  x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1261  emit_insn (gen_rtx_SET (operands[0], x));
1262  DONE;
1263})
1264
1265(define_insn "*altivec_vmrglw_internal"
1266  [(set (match_operand:V4SI 0 "register_operand" "=v")
1267        (vec_select:V4SI
1268	  (vec_concat:V8SI
1269	    (match_operand:V4SI 1 "register_operand" "v")
1270	    (match_operand:V4SI 2 "register_operand" "v"))
1271	  (parallel [(const_int 2) (const_int 6)
1272		     (const_int 3) (const_int 7)])))]
1273  "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1274{
1275  if (BYTES_BIG_ENDIAN)
1276    return "vmrglw %0,%1,%2";
1277  else
1278    return "vmrghw %0,%2,%1";
1279}
1280  [(set_attr "type" "vecperm")])
1281
1282(define_insn "altivec_vmrglw_direct"
1283  [(set (match_operand:V4SI 0 "register_operand" "=v")
1284        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1285	              (match_operand:V4SI 2 "register_operand" "v")]
1286                     UNSPEC_VMRGL_DIRECT))]
1287  "TARGET_ALTIVEC"
1288  "vmrglw %0,%1,%2"
1289  [(set_attr "type" "vecperm")])
1290
1291(define_insn "*altivec_vmrglsf"
1292  [(set (match_operand:V4SF 0 "register_operand" "=v")
1293        (vec_select:V4SF
1294	 (vec_concat:V8SF
1295	   (match_operand:V4SF 1 "register_operand" "v")
1296	   (match_operand:V4SF 2 "register_operand" "v"))
1297	 (parallel [(const_int 2) (const_int 6)
1298		    (const_int 3) (const_int 7)])))]
1299  "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1300{
1301  if (BYTES_BIG_ENDIAN)
1302    return "vmrglw %0,%1,%2";
1303  else
1304    return "vmrghw %0,%2,%1";
1305}
1306  [(set_attr "type" "vecperm")])
1307
;; Power8 vector merge even/odd
;; On little-endian, even/odd lane numbering is reversed, so the
;; opposite instruction with swapped operands produces the selection
;; described by the RTL.
(define_insn "p8_vmrgew"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
	(vec_select:V4SI
	  (vec_concat:V8SI
	    (match_operand:V4SI 1 "register_operand" "v")
	    (match_operand:V4SI 2 "register_operand" "v"))
	  (parallel [(const_int 0) (const_int 4)
		     (const_int 2) (const_int 6)])))]
  "TARGET_P8_VECTOR"
{
  if (BYTES_BIG_ENDIAN)
    return "vmrgew %0,%1,%2";
  else
    return "vmrgow %0,%2,%1";
}
  [(set_attr "type" "vecperm")])

;; Merge odd words; dual of p8_vmrgew above.
(define_insn "p8_vmrgow"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
	(vec_select:V4SI
	  (vec_concat:V8SI
	    (match_operand:V4SI 1 "register_operand" "v")
	    (match_operand:V4SI 2 "register_operand" "v"))
	  (parallel [(const_int 1) (const_int 5)
		     (const_int 3) (const_int 7)])))]
  "TARGET_P8_VECTOR"
{
  if (BYTES_BIG_ENDIAN)
    return "vmrgow %0,%1,%2";
  else
    return "vmrgew %0,%2,%1";
}
  [(set_attr "type" "vecperm")])
1342
1343(define_expand "vec_widen_umult_even_v16qi"
1344  [(use (match_operand:V8HI 0 "register_operand" ""))
1345   (use (match_operand:V16QI 1 "register_operand" ""))
1346   (use (match_operand:V16QI 2 "register_operand" ""))]
1347  "TARGET_ALTIVEC"
1348{
1349  if (VECTOR_ELT_ORDER_BIG)
1350    emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1351  else
1352    emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1353  DONE;
1354})
1355
1356(define_expand "vec_widen_smult_even_v16qi"
1357  [(use (match_operand:V8HI 0 "register_operand" ""))
1358   (use (match_operand:V16QI 1 "register_operand" ""))
1359   (use (match_operand:V16QI 2 "register_operand" ""))]
1360  "TARGET_ALTIVEC"
1361{
1362  if (VECTOR_ELT_ORDER_BIG)
1363    emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1364  else
1365    emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1366  DONE;
1367})
1368
1369(define_expand "vec_widen_umult_even_v8hi"
1370  [(use (match_operand:V4SI 0 "register_operand" ""))
1371   (use (match_operand:V8HI 1 "register_operand" ""))
1372   (use (match_operand:V8HI 2 "register_operand" ""))]
1373  "TARGET_ALTIVEC"
1374{
1375  if (VECTOR_ELT_ORDER_BIG)
1376    emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1377  else
1378    emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1379  DONE;
1380})
1381
1382(define_expand "vec_widen_smult_even_v8hi"
1383  [(use (match_operand:V4SI 0 "register_operand" ""))
1384   (use (match_operand:V8HI 1 "register_operand" ""))
1385   (use (match_operand:V8HI 2 "register_operand" ""))]
1386  "TARGET_ALTIVEC"
1387{
1388  if (VECTOR_ELT_ORDER_BIG)
1389    emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1390  else
1391    emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1392  DONE;
1393})
1394
1395(define_expand "vec_widen_umult_odd_v16qi"
1396  [(use (match_operand:V8HI 0 "register_operand" ""))
1397   (use (match_operand:V16QI 1 "register_operand" ""))
1398   (use (match_operand:V16QI 2 "register_operand" ""))]
1399  "TARGET_ALTIVEC"
1400{
1401  if (VECTOR_ELT_ORDER_BIG)
1402    emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1403  else
1404    emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1405  DONE;
1406})
1407
1408(define_expand "vec_widen_smult_odd_v16qi"
1409  [(use (match_operand:V8HI 0 "register_operand" ""))
1410   (use (match_operand:V16QI 1 "register_operand" ""))
1411   (use (match_operand:V16QI 2 "register_operand" ""))]
1412  "TARGET_ALTIVEC"
1413{
1414  if (VECTOR_ELT_ORDER_BIG)
1415    emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1416  else
1417    emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1418  DONE;
1419})
1420
1421(define_expand "vec_widen_umult_odd_v8hi"
1422  [(use (match_operand:V4SI 0 "register_operand" ""))
1423   (use (match_operand:V8HI 1 "register_operand" ""))
1424   (use (match_operand:V8HI 2 "register_operand" ""))]
1425  "TARGET_ALTIVEC"
1426{
1427  if (VECTOR_ELT_ORDER_BIG)
1428    emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1429  else
1430    emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1431  DONE;
1432})
1433
1434(define_expand "vec_widen_smult_odd_v8hi"
1435  [(use (match_operand:V4SI 0 "register_operand" ""))
1436   (use (match_operand:V8HI 1 "register_operand" ""))
1437   (use (match_operand:V8HI 2 "register_operand" ""))]
1438  "TARGET_ALTIVEC"
1439{
1440  if (VECTOR_ELT_ORDER_BIG)
1441    emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1442  else
1443    emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1444  DONE;
1445})
1446
1447(define_insn "altivec_vmuleub"
1448  [(set (match_operand:V8HI 0 "register_operand" "=v")
1449        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1450                      (match_operand:V16QI 2 "register_operand" "v")]
1451		     UNSPEC_VMULEUB))]
1452  "TARGET_ALTIVEC"
1453  "vmuleub %0,%1,%2"
1454  [(set_attr "type" "veccomplex")])
1455
1456(define_insn "altivec_vmuloub"
1457  [(set (match_operand:V8HI 0 "register_operand" "=v")
1458        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1459                      (match_operand:V16QI 2 "register_operand" "v")]
1460		     UNSPEC_VMULOUB))]
1461  "TARGET_ALTIVEC"
1462  "vmuloub %0,%1,%2"
1463  [(set_attr "type" "veccomplex")])
1464
1465(define_insn "altivec_vmulesb"
1466  [(set (match_operand:V8HI 0 "register_operand" "=v")
1467        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1468                      (match_operand:V16QI 2 "register_operand" "v")]
1469		     UNSPEC_VMULESB))]
1470  "TARGET_ALTIVEC"
1471  "vmulesb %0,%1,%2"
1472  [(set_attr "type" "veccomplex")])
1473
1474(define_insn "altivec_vmulosb"
1475  [(set (match_operand:V8HI 0 "register_operand" "=v")
1476        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1477                      (match_operand:V16QI 2 "register_operand" "v")]
1478		     UNSPEC_VMULOSB))]
1479  "TARGET_ALTIVEC"
1480  "vmulosb %0,%1,%2"
1481  [(set_attr "type" "veccomplex")])
1482
1483(define_insn "altivec_vmuleuh"
1484  [(set (match_operand:V4SI 0 "register_operand" "=v")
1485        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1486                      (match_operand:V8HI 2 "register_operand" "v")]
1487		     UNSPEC_VMULEUH))]
1488  "TARGET_ALTIVEC"
1489  "vmuleuh %0,%1,%2"
1490  [(set_attr "type" "veccomplex")])
1491
1492(define_insn "altivec_vmulouh"
1493  [(set (match_operand:V4SI 0 "register_operand" "=v")
1494        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1495                      (match_operand:V8HI 2 "register_operand" "v")]
1496		     UNSPEC_VMULOUH))]
1497  "TARGET_ALTIVEC"
1498  "vmulouh %0,%1,%2"
1499  [(set_attr "type" "veccomplex")])
1500
1501(define_insn "altivec_vmulesh"
1502  [(set (match_operand:V4SI 0 "register_operand" "=v")
1503        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1504                      (match_operand:V8HI 2 "register_operand" "v")]
1505		     UNSPEC_VMULESH))]
1506  "TARGET_ALTIVEC"
1507  "vmulesh %0,%1,%2"
1508  [(set_attr "type" "veccomplex")])
1509
1510(define_insn "altivec_vmulosh"
1511  [(set (match_operand:V4SI 0 "register_operand" "=v")
1512        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1513                      (match_operand:V8HI 2 "register_operand" "v")]
1514		     UNSPEC_VMULOSH))]
1515  "TARGET_ALTIVEC"
1516  "vmulosh %0,%1,%2"
1517  [(set_attr "type" "veccomplex")])
1518
1519
;; Vector pack/unpack
;; The pack instructions concatenate their operands' narrowed elements
;; in big-endian order, so for little-endian element order the operands
;; are emitted swapped.

;; Pack pixel: V4SI pixels narrowed to V8HI.
(define_insn "altivec_vpkpx"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VPKPX))]
  "TARGET_ALTIVEC"
  "*
  {
    if (VECTOR_ELT_ORDER_BIG)
      return \"vpkpx %0,%1,%2\";
    else
      return \"vpkpx %0,%2,%1\";
  }"
  [(set_attr "type" "vecperm")])

;; Pack signed to signed with saturation.
(define_insn "altivec_vpks<VI_char>ss"
  [(set (match_operand:<VP_small> 0 "register_operand" "=v")
	(unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
			    (match_operand:VP 2 "register_operand" "v")]
			   UNSPEC_VPACK_SIGN_SIGN_SAT))]
  "<VI_unit>"
  "*
  {
    if (VECTOR_ELT_ORDER_BIG)
      return \"vpks<VI_char>ss %0,%1,%2\";
    else
      return \"vpks<VI_char>ss %0,%2,%1\";
  }"
  [(set_attr "type" "vecperm")])

;; Pack signed to unsigned with saturation.
(define_insn "altivec_vpks<VI_char>us"
  [(set (match_operand:<VP_small> 0 "register_operand" "=v")
	(unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
			    (match_operand:VP 2 "register_operand" "v")]
			   UNSPEC_VPACK_SIGN_UNS_SAT))]
  "<VI_unit>"
  "*
  {
    if (VECTOR_ELT_ORDER_BIG)
      return \"vpks<VI_char>us %0,%1,%2\";
    else
      return \"vpks<VI_char>us %0,%2,%1\";
  }"
  [(set_attr "type" "vecperm")])

;; Pack unsigned to unsigned with saturation.
(define_insn "altivec_vpku<VI_char>us"
  [(set (match_operand:<VP_small> 0 "register_operand" "=v")
	(unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
			    (match_operand:VP 2 "register_operand" "v")]
			   UNSPEC_VPACK_UNS_UNS_SAT))]
  "<VI_unit>"
  "*
  {
    if (VECTOR_ELT_ORDER_BIG)
      return \"vpku<VI_char>us %0,%1,%2\";
    else
      return \"vpku<VI_char>us %0,%2,%1\";
  }"
  [(set_attr "type" "vecperm")])

;; Pack unsigned modulo (truncating, no saturation).
(define_insn "altivec_vpku<VI_char>um"
  [(set (match_operand:<VP_small> 0 "register_operand" "=v")
	(unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
			    (match_operand:VP 2 "register_operand" "v")]
			   UNSPEC_VPACK_UNS_UNS_MOD))]
  "<VI_unit>"
  "*
  {
    if (VECTOR_ELT_ORDER_BIG)
      return \"vpku<VI_char>um %0,%1,%2\";
    else
      return \"vpku<VI_char>um %0,%2,%1\";
  }"
  [(set_attr "type" "vecperm")])

;; Direct form of the modulo pack; keys the operand swap off the true
;; byte order (BYTES_BIG_ENDIAN) instead of the element order, i.e. it
;; ignores -maltivec=be.
(define_insn "altivec_vpku<VI_char>um_direct"
  [(set (match_operand:<VP_small> 0 "register_operand" "=v")
	(unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
			    (match_operand:VP 2 "register_operand" "v")]
			   UNSPEC_VPACK_UNS_UNS_MOD_DIRECT))]
  "<VI_unit>"
  "*
  {
    if (BYTES_BIG_ENDIAN)
      return \"vpku<VI_char>um %0,%1,%2\";
    else
      return \"vpku<VI_char>um %0,%2,%1\";
  }"
  [(set_attr "type" "vecperm")])
1610
1611(define_insn "*altivec_vrl<VI_char>"
1612  [(set (match_operand:VI2 0 "register_operand" "=v")
1613        (rotate:VI2 (match_operand:VI2 1 "register_operand" "v")
1614		    (match_operand:VI2 2 "register_operand" "v")))]
1615  "<VI_unit>"
1616  "vrl<VI_char> %0,%1,%2"
1617  [(set_attr "type" "vecsimple")])
1618
1619(define_insn "altivec_vsl"
1620  [(set (match_operand:V4SI 0 "register_operand" "=v")
1621        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1622                      (match_operand:V4SI 2 "register_operand" "v")]
1623		     UNSPEC_VSLV4SI))]
1624  "TARGET_ALTIVEC"
1625  "vsl %0,%1,%2"
1626  [(set_attr "type" "vecperm")])
1627
1628(define_insn "altivec_vslo"
1629  [(set (match_operand:V4SI 0 "register_operand" "=v")
1630        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1631                      (match_operand:V4SI 2 "register_operand" "v")]
1632		     UNSPEC_VSLO))]
1633  "TARGET_ALTIVEC"
1634  "vslo %0,%1,%2"
1635  [(set_attr "type" "vecperm")])
1636
1637(define_insn "vslv"
1638  [(set (match_operand:V16QI 0 "register_operand" "=v")
1639	(unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1640		       (match_operand:V16QI 2 "register_operand" "v")]
1641         UNSPEC_VSLV))]
1642  "TARGET_P9_VECTOR"
1643  "vslv %0,%1,%2"
1644  [(set_attr "type" "vecsimple")])
1645
1646(define_insn "vsrv"
1647  [(set (match_operand:V16QI 0 "register_operand" "=v")
1648	(unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1649		       (match_operand:V16QI 2 "register_operand" "v")]
1650         UNSPEC_VSRV))]
1651  "TARGET_P9_VECTOR"
1652  "vsrv %0,%1,%2"
1653  [(set_attr "type" "vecsimple")])
1654
1655(define_insn "*altivec_vsl<VI_char>"
1656  [(set (match_operand:VI2 0 "register_operand" "=v")
1657        (ashift:VI2 (match_operand:VI2 1 "register_operand" "v")
1658		    (match_operand:VI2 2 "register_operand" "v")))]
1659  "<VI_unit>"
1660  "vsl<VI_char> %0,%1,%2"
1661  [(set_attr "type" "vecsimple")])
1662
1663(define_insn "*altivec_vsr<VI_char>"
1664  [(set (match_operand:VI2 0 "register_operand" "=v")
1665        (lshiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1666		      (match_operand:VI2 2 "register_operand" "v")))]
1667  "<VI_unit>"
1668  "vsr<VI_char> %0,%1,%2"
1669  [(set_attr "type" "vecsimple")])
1670
1671(define_insn "*altivec_vsra<VI_char>"
1672  [(set (match_operand:VI2 0 "register_operand" "=v")
1673        (ashiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1674		      (match_operand:VI2 2 "register_operand" "v")))]
1675  "<VI_unit>"
1676  "vsra<VI_char> %0,%1,%2"
1677  [(set_attr "type" "vecsimple")])
1678
1679(define_insn "altivec_vsr"
1680  [(set (match_operand:V4SI 0 "register_operand" "=v")
1681        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1682                      (match_operand:V4SI 2 "register_operand" "v")]
1683		     UNSPEC_VSR))]
1684  "TARGET_ALTIVEC"
1685  "vsr %0,%1,%2"
1686  [(set_attr "type" "vecperm")])
1687
1688(define_insn "altivec_vsro"
1689  [(set (match_operand:V4SI 0 "register_operand" "=v")
1690        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1691                      (match_operand:V4SI 2 "register_operand" "v")]
1692		     UNSPEC_VSRO))]
1693  "TARGET_ALTIVEC"
1694  "vsro %0,%1,%2"
1695  [(set_attr "type" "vecperm")])
1696
1697(define_insn "altivec_vsum4ubs"
1698  [(set (match_operand:V4SI 0 "register_operand" "=v")
1699        (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")
1700                      (match_operand:V4SI 2 "register_operand" "v")]
1701		     UNSPEC_VSUM4UBS))
1702   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1703  "TARGET_ALTIVEC"
1704  "vsum4ubs %0,%1,%2"
1705  [(set_attr "type" "veccomplex")])
1706
1707(define_insn "altivec_vsum4s<VI_char>s"
1708  [(set (match_operand:V4SI 0 "register_operand" "=v")
1709        (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1710                      (match_operand:V4SI 2 "register_operand" "v")]
1711		     UNSPEC_VSUM4S))
1712   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1713  "TARGET_ALTIVEC"
1714  "vsum4s<VI_char>s %0,%1,%2"
1715  [(set_attr "type" "veccomplex")])
1716
1717(define_expand "altivec_vsum2sws"
1718  [(use (match_operand:V4SI 0 "register_operand"))
1719   (use (match_operand:V4SI 1 "register_operand"))
1720   (use (match_operand:V4SI 2 "register_operand"))]
1721  "TARGET_ALTIVEC"
1722{
1723  if (VECTOR_ELT_ORDER_BIG)
1724    emit_insn (gen_altivec_vsum2sws_direct (operands[0], operands[1],
1725                                            operands[2]));
1726  else
1727    {
1728      rtx tmp1 = gen_reg_rtx (V4SImode);
1729      rtx tmp2 = gen_reg_rtx (V4SImode);
1730      emit_insn (gen_altivec_vsldoi_v4si (tmp1, operands[2],
1731                                          operands[2], GEN_INT (12)));
1732      emit_insn (gen_altivec_vsum2sws_direct (tmp2, operands[1], tmp1));
1733      emit_insn (gen_altivec_vsldoi_v4si (operands[0], tmp2, tmp2,
1734                                          GEN_INT (4)));
1735    }
1736  DONE;
1737})
1738
1739; FIXME: This can probably be expressed without an UNSPEC.
1740(define_insn "altivec_vsum2sws_direct"
1741  [(set (match_operand:V4SI 0 "register_operand" "=v")
1742        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1743	              (match_operand:V4SI 2 "register_operand" "v")]
1744		     UNSPEC_VSUM2SWS))
1745   (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1746  "TARGET_ALTIVEC"
1747  "vsum2sws %0,%1,%2"
1748  [(set_attr "type" "veccomplex")])
1749
1750(define_expand "altivec_vsumsws"
1751  [(use (match_operand:V4SI 0 "register_operand"))
1752   (use (match_operand:V4SI 1 "register_operand"))
1753   (use (match_operand:V4SI 2 "register_operand"))]
1754  "TARGET_ALTIVEC"
1755{
1756  if (VECTOR_ELT_ORDER_BIG)
1757    emit_insn (gen_altivec_vsumsws_direct (operands[0], operands[1],
1758                                           operands[2]));
1759  else
1760    {
1761      rtx tmp1 = gen_reg_rtx (V4SImode);
1762      rtx tmp2 = gen_reg_rtx (V4SImode);
1763      emit_insn (gen_altivec_vspltw_direct (tmp1, operands[2], const0_rtx));
1764      emit_insn (gen_altivec_vsumsws_direct (tmp2, operands[1], tmp1));
1765      emit_insn (gen_altivec_vsldoi_v4si (operands[0], tmp2, tmp2,
1766                                          GEN_INT (12)));
1767    }
1768  DONE;
1769})
1770
; FIXME: This can probably be expressed without an UNSPEC.
;; Direct form: always emits vsumsws with operands as given.  The
;; parallel set of the VSCR models the instruction's status-register
;; update; use the named VSCR_REGNO macro rather than the hard-coded
;; register number 110, consistent with altivec_vsum2sws_direct above.
(define_insn "altivec_vsumsws_direct"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VSUMSWS_DIRECT))
   (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "TARGET_ALTIVEC"
  "vsumsws %0,%1,%2"
  [(set_attr "type" "veccomplex")])
1781
;; Splat byte: replicate the byte selected by operand 2 of operand 1 into
;; all sixteen result bytes.  The expander canonicalizes the element index
;; into RTL (vec_select/vec_duplicate) form.
(define_expand "altivec_vspltb"
  [(use (match_operand:V16QI 0 "register_operand" ""))
   (use (match_operand:V16QI 1 "register_operand" ""))
   (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
  "TARGET_ALTIVEC"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  We have to reflect
     the actual selected index for the splat in the RTL.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    operands[2] = GEN_INT (15 - INTVAL (operands[2]));

  v = gen_rtvec (1, operands[2]);
  x = gen_rtx_VEC_SELECT (QImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
  x = gen_rtx_VEC_DUPLICATE (V16QImode, x);
  emit_insn (gen_rtx_SET (operands[0], x));
  DONE;
})

;; Matcher for the vec_select/vec_duplicate form above; converts the RTL
;; element number back to the instruction's big-endian-biased immediate.
(define_insn "*altivec_vspltb_internal"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (vec_duplicate:V16QI
	 (vec_select:QI (match_operand:V16QI 1 "register_operand" "v")
			(parallel
			 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
  "TARGET_ALTIVEC"
{
  /* For true LE, this adjusts the selected index.  For LE with
     -maltivec=be, this reverses what was done in the define_expand
     because the instruction already has big-endian bias.  */
  if (!BYTES_BIG_ENDIAN)
    operands[2] = GEN_INT (15 - INTVAL (operands[2]));

  return "vspltb %0,%1,%2";
}
  [(set_attr "type" "vecperm")])

;; Unspec form with no endian adjustment at all; for callers that have
;; already computed the instruction-level element number.
(define_insn "altivec_vspltb_direct"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
	               (match_operand:QI 2 "u5bit_cint_operand" "i")]
                      UNSPEC_VSPLT_DIRECT))]
  "TARGET_ALTIVEC"
  "vspltb %0,%1,%2"
  [(set_attr "type" "vecperm")])
1829
;; Splat halfword: same structure as the vspltb patterns, with eight
;; elements, so the index reflection uses 7 - n.
(define_expand "altivec_vsplth"
  [(use (match_operand:V8HI 0 "register_operand" ""))
   (use (match_operand:V8HI 1 "register_operand" ""))
   (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
  "TARGET_ALTIVEC"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  We have to reflect
     the actual selected index for the splat in the RTL.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    operands[2] = GEN_INT (7 - INTVAL (operands[2]));

  v = gen_rtvec (1, operands[2]);
  x = gen_rtx_VEC_SELECT (HImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
  x = gen_rtx_VEC_DUPLICATE (V8HImode, x);
  emit_insn (gen_rtx_SET (operands[0], x));
  DONE;
})

;; Matcher for the vec_select/vec_duplicate form; converts the RTL element
;; number back to the instruction's big-endian-biased immediate.
(define_insn "*altivec_vsplth_internal"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
	(vec_duplicate:V8HI
	 (vec_select:HI (match_operand:V8HI 1 "register_operand" "v")
			(parallel
			 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
  "TARGET_ALTIVEC"
{
  /* For true LE, this adjusts the selected index.  For LE with
     -maltivec=be, this reverses what was done in the define_expand
     because the instruction already has big-endian bias.  */
  if (!BYTES_BIG_ENDIAN)
    operands[2] = GEN_INT (7 - INTVAL (operands[2]));

  return "vsplth %0,%1,%2";
}
  [(set_attr "type" "vecperm")])

;; Unspec form with no endian adjustment; element number is taken as-is.
(define_insn "altivec_vsplth_direct"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:QI 2 "u5bit_cint_operand" "i")]
                     UNSPEC_VSPLT_DIRECT))]
  "TARGET_ALTIVEC"
  "vsplth %0,%1,%2"
  [(set_attr "type" "vecperm")])
1877
;; Splat word: same structure as the vspltb/vsplth patterns, with four
;; elements, so the index reflection uses 3 - n.
(define_expand "altivec_vspltw"
  [(use (match_operand:V4SI 0 "register_operand" ""))
   (use (match_operand:V4SI 1 "register_operand" ""))
   (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
  "TARGET_ALTIVEC"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  We have to reflect
     the actual selected index for the splat in the RTL.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    operands[2] = GEN_INT (3 - INTVAL (operands[2]));

  v = gen_rtvec (1, operands[2]);
  x = gen_rtx_VEC_SELECT (SImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
  x = gen_rtx_VEC_DUPLICATE (V4SImode, x);
  emit_insn (gen_rtx_SET (operands[0], x));
  DONE;
})

;; Matcher for the vec_select/vec_duplicate form; converts the RTL element
;; number back to the instruction's big-endian-biased immediate.
(define_insn "*altivec_vspltw_internal"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
	(vec_duplicate:V4SI
	 (vec_select:SI (match_operand:V4SI 1 "register_operand" "v")
			(parallel
			 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
  "TARGET_ALTIVEC"
{
  /* For true LE, this adjusts the selected index.  For LE with
     -maltivec=be, this reverses what was done in the define_expand
     because the instruction already has big-endian bias.  */
  if (!BYTES_BIG_ENDIAN)
    operands[2] = GEN_INT (3 - INTVAL (operands[2]));

  return "vspltw %0,%1,%2";
}
  [(set_attr "type" "vecperm")])

;; Unspec form with no endian adjustment; element number is taken as-is.
(define_insn "altivec_vspltw_direct"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:QI 2 "u5bit_cint_operand" "i")]
                     UNSPEC_VSPLT_DIRECT))]
  "TARGET_ALTIVEC"
  "vspltw %0,%1,%2"
  [(set_attr "type" "vecperm")])
1925
;; Splat of a V4SF element.  There is no dedicated float splat instruction;
;; the insn below emits vspltw, which moves the 32-bit element bits as-is.
(define_expand "altivec_vspltsf"
  [(use (match_operand:V4SF 0 "register_operand" ""))
   (use (match_operand:V4SF 1 "register_operand" ""))
   (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
  "TARGET_ALTIVEC"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  We have to reflect
     the actual selected index for the splat in the RTL.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    operands[2] = GEN_INT (3 - INTVAL (operands[2]));

  v = gen_rtvec (1, operands[2]);
  x = gen_rtx_VEC_SELECT (SFmode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
  x = gen_rtx_VEC_DUPLICATE (V4SFmode, x);
  emit_insn (gen_rtx_SET (operands[0], x));
  DONE;
})

;; Matcher for the V4SF splat; note that it deliberately emits vspltw
;; (a word splat), since a float element is one word wide.
(define_insn "*altivec_vspltsf_internal"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
	(vec_duplicate:V4SF
	 (vec_select:SF (match_operand:V4SF 1 "register_operand" "v")
			(parallel
			 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
{
  /* For true LE, this adjusts the selected index.  For LE with
     -maltivec=be, this reverses what was done in the define_expand
     because the instruction already has big-endian bias.  */
  if (!BYTES_BIG_ENDIAN)
    operands[2] = GEN_INT (3 - INTVAL (operands[2]));

  return "vspltw %0,%1,%2";
}
  [(set_attr "type" "vecperm")])
1964
;; Splat a 5-bit signed immediate into every element (vspltisb/h/w).
(define_insn "altivec_vspltis<VI_char>"
  [(set (match_operand:VI 0 "register_operand" "=v")
	(vec_duplicate:VI
	 (match_operand:QI 1 "s5bit_cint_operand" "i")))]
  "TARGET_ALTIVEC"
  "vspltis<VI_char> %0,%1"
  [(set_attr "type" "vecperm")])

;; vrfiz: round each float element to an integral value toward zero;
;; same-mode FIX here represents the truncating round, not a conversion.
(define_insn "*altivec_vrfiz"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
	(fix:V4SF (match_operand:V4SF 1 "register_operand" "v")))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vrfiz %0,%1"
  [(set_attr "type" "vecfloat")])
1979
;; vperm: select bytes from the 32-byte concatenation of operands 1 and 2
;; under control of operand 3.  For little-endian element order the whole
;; operation is rewritten by altivec_expand_vec_perm_le.
(define_expand "altivec_vperm_<mode>"
  [(set (match_operand:VM 0 "register_operand" "")
	(unspec:VM [(match_operand:VM 1 "register_operand" "")
		    (match_operand:VM 2 "register_operand" "")
		    (match_operand:V16QI 3 "register_operand" "")]
		   UNSPEC_VPERM))]
  "TARGET_ALTIVEC"
{
  if (!VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_vec_perm_le (operands);
      DONE;
    }
})

;; Slightly prefer vperm, since the target does not overlap the source
(define_insn "*altivec_vperm_<mode>_internal"
  [(set (match_operand:VM 0 "register_operand" "=v,?wo")
	(unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
		    (match_operand:VM 2 "register_operand" "v,0")
		    (match_operand:V16QI 3 "register_operand" "v,wo")]
		   UNSPEC_VPERM))]
  "TARGET_ALTIVEC"
  "@
   vperm %0,%1,%2,%3
   xxperm %x0,%x1,%x3"
  [(set_attr "type" "vecperm")
   (set_attr "length" "4")])

;; Mixed-mode vperm: V8HI sources with a V16QI result and control vector.
(define_insn "altivec_vperm_v8hiv16qi"
  [(set (match_operand:V16QI 0 "register_operand" "=v,?wo")
	(unspec:V16QI [(match_operand:V8HI 1 "register_operand" "v,wo")
   	               (match_operand:V8HI 2 "register_operand" "v,0")
		       (match_operand:V16QI 3 "register_operand" "v,wo")]
		   UNSPEC_VPERM))]
  "TARGET_ALTIVEC"
  "@
   vperm %0,%1,%2,%3
   xxperm %x0,%x1,%x3"
  [(set_attr "type" "vecperm")
   (set_attr "length" "4")])

;; Unsigned-element flavor of the vperm expander; same LE rewriting.
(define_expand "altivec_vperm_<mode>_uns"
  [(set (match_operand:VM 0 "register_operand" "")
	(unspec:VM [(match_operand:VM 1 "register_operand" "")
		    (match_operand:VM 2 "register_operand" "")
		    (match_operand:V16QI 3 "register_operand" "")]
		   UNSPEC_VPERM_UNS))]
  "TARGET_ALTIVEC"
{
  if (!VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_vec_perm_le (operands);
      DONE;
    }
})

;; Matcher for the unsigned-flavor vperm unspec.
(define_insn "*altivec_vperm_<mode>_uns_internal"
  [(set (match_operand:VM 0 "register_operand" "=v,?wo")
	(unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
		    (match_operand:VM 2 "register_operand" "v,0")
		    (match_operand:V16QI 3 "register_operand" "v,wo")]
		   UNSPEC_VPERM_UNS))]
  "TARGET_ALTIVEC"
  "@
   vperm %0,%1,%2,%3
   xxperm %x0,%x1,%x3"
  [(set_attr "type" "vecperm")
   (set_attr "length" "4")])
2049
;; Standard-named byte permute used by the vectorizer.  True little-endian
;; targets rewrite the permute via altivec_expand_vec_perm_le; big-endian
;; targets fall through to the vperm matcher.
(define_expand "vec_permv16qi"
  [(set (match_operand:V16QI 0 "register_operand" "")
	(unspec:V16QI [(match_operand:V16QI 1 "register_operand" "")
		       (match_operand:V16QI 2 "register_operand" "")
		       (match_operand:V16QI 3 "register_operand" "")]
		      UNSPEC_VPERM))]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN)
    {
      altivec_expand_vec_perm_le (operands);
      DONE;
    }
})
2063
;; Constant byte permute.  Succeeds only when the target hook can map the
;; constant selector onto an instruction sequence; otherwise the expander
;; FAILs and the caller uses a generic fallback.
(define_expand "vec_perm_constv16qi"
  [(match_operand:V16QI 0 "register_operand" "")
   (match_operand:V16QI 1 "register_operand" "")
   (match_operand:V16QI 2 "register_operand" "")
   (match_operand:V16QI 3 "" "")]
  "TARGET_ALTIVEC"
{
  if (!altivec_expand_vec_perm_const (operands))
    FAIL;
  DONE;
})
2076
;; ISA 3.0 (Power9) reversed permute (vpermr/xxpermr).
(define_insn "*altivec_vpermr_<mode>_internal"
  [(set (match_operand:VM 0 "register_operand" "=v,?wo")
	(unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
		    (match_operand:VM 2 "register_operand" "v,0")
		    (match_operand:V16QI 3 "register_operand" "v,wo")]
		   UNSPEC_VPERMR))]
  "TARGET_P9_VECTOR"
  "@
   vpermr %0,%1,%2,%3
   xxpermr %x0,%x1,%x3"
  [(set_attr "type" "vecperm")
   (set_attr "length" "4")])
2089
;; Round to integral value toward +infinity (used for ceil).
(define_insn "altivec_vrfip"		; ceil
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_FRIP))]
  "TARGET_ALTIVEC"
  "vrfip %0,%1"
  [(set_attr "type" "vecfloat")])

;; Round to integral value to nearest.
(define_insn "altivec_vrfin"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_VRFIN))]
  "TARGET_ALTIVEC"
  "vrfin %0,%1"
  [(set_attr "type" "vecfloat")])

;; Round to integral value toward -infinity (used for floor).
(define_insn "*altivec_vrfim"		; floor
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_FRIM))]
  "TARGET_ALTIVEC"
  "vrfim %0,%1"
  [(set_attr "type" "vecfloat")])

;; Convert unsigned words to float with a scale-by-2^(-n) immediate.
(define_insn "altivec_vcfux"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
	              (match_operand:QI 2 "immediate_operand" "i")]
		     UNSPEC_VCFUX))]
  "TARGET_ALTIVEC"
  "vcfux %0,%1,%2"
  [(set_attr "type" "vecfloat")])

;; Convert signed words to float with a scale-by-2^(-n) immediate.
(define_insn "altivec_vcfsx"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
	              (match_operand:QI 2 "immediate_operand" "i")]
		     UNSPEC_VCFSX))]
  "TARGET_ALTIVEC"
  "vcfsx %0,%1,%2"
  [(set_attr "type" "vecfloat")])
2131
;; Convert floats to unsigned words with saturation (vctuxs).  Use the
;; symbolic VSCR_REGNO for the saturation-state clobber, matching
;; altivec_vsum2sws_direct, rather than the hard-coded register 110.
(define_insn "altivec_vctuxs"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
                      (match_operand:QI 2 "immediate_operand" "i")]
		     UNSPEC_VCTUXS))
   (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "TARGET_ALTIVEC"
  "vctuxs %0,%1,%2"
  [(set_attr "type" "vecfloat")])
2141
;; Convert floats to signed words with saturation (vctsxs).  Use the
;; symbolic VSCR_REGNO for the saturation-state clobber, matching
;; altivec_vsum2sws_direct, rather than the hard-coded register 110.
(define_insn "altivec_vctsxs"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
                      (match_operand:QI 2 "immediate_operand" "i")]
		     UNSPEC_VCTSXS))
   (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "TARGET_ALTIVEC"
  "vctsxs %0,%1,%2"
  [(set_attr "type" "vecfloat")])
2151
;; Estimate log2 of each float element.
(define_insn "altivec_vlogefp"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_VLOGEFP))]
  "TARGET_ALTIVEC"
  "vlogefp %0,%1"
  [(set_attr "type" "vecfloat")])

;; Estimate 2**x of each float element.
(define_insn "altivec_vexptefp"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_VEXPTEFP))]
  "TARGET_ALTIVEC"
  "vexptefp %0,%1"
  [(set_attr "type" "vecfloat")])

;; Reciprocal square-root estimate of each float element.
(define_insn "*altivec_vrsqrtefp"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_RSQRT))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vrsqrtefp %0,%1"
  [(set_attr "type" "vecfloat")])

;; Reciprocal estimate of each float element.
(define_insn "altivec_vrefp"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_FRES))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vrefp %0,%1"
  [(set_attr "type" "vecfloat")])
2183
;; copysign for V4SF: take the magnitude bits of operand 1 and the sign
;; bit of operand 2 by selecting through a mask that holds only the sign
;; bit of each 32-bit word.
(define_expand "altivec_copysign_v4sf3"
  [(use (match_operand:V4SF 0 "register_operand" ""))
   (use (match_operand:V4SF 1 "register_operand" ""))
   (use (match_operand:V4SF 2 "register_operand" ""))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
{
  unsigned HOST_WIDE_INT sign_bit = ((unsigned HOST_WIDE_INT) 1) << 31;
  rtvec elts = rtvec_alloc (4);
  rtx sign_mask = gen_reg_rtx (V4SImode);
  int i;

  /* Each word of the mask is just the sign bit.  */
  for (i = 0; i < 4; i++)
    RTVEC_ELT (elts, i) = GEN_INT (sign_bit);

  emit_insn (gen_vec_initv4si (sign_mask, gen_rtx_PARALLEL (V4SImode, elts)));
  emit_insn (gen_vector_select_v4sf (operands[0], operands[1], operands[2],
				     gen_lowpart (V4SFmode, sign_mask)));
  DONE;
})
2205
;; vsldoi: shift the 32-byte concatenation of operands 1 and 2 left by the
;; immediate byte count in operand 3 and take the high 16 bytes.
(define_insn "altivec_vsldoi_<mode>"
  [(set (match_operand:VM 0 "register_operand" "=v")
        (unspec:VM [(match_operand:VM 1 "register_operand" "v")
		    (match_operand:VM 2 "register_operand" "v")
		    (match_operand:QI 3 "immediate_operand" "i")]
		  UNSPEC_VSLDOI))]
  "TARGET_ALTIVEC"
  "vsldoi %0,%1,%2,%3"
  [(set_attr "type" "vecperm")])
2215
;; Sign-unpack the "high" half of the source vector.  In little-endian
;; element order high and low halves are swapped, so the opposite
;; instruction is emitted.
(define_insn "altivec_vupkhs<VU_char>"
  [(set (match_operand:VP 0 "register_operand" "=v")
	(unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
		     UNSPEC_VUNPACK_HI_SIGN))]
  "<VI_unit>"
{
  if (VECTOR_ELT_ORDER_BIG)
    return "vupkhs<VU_char> %0,%1";
  else
    return "vupkls<VU_char> %0,%1";
}
  [(set_attr "type" "vecperm")])

;; Direct form: always the vupkhs instruction, no endian correction.
(define_insn "*altivec_vupkhs<VU_char>_direct"
  [(set (match_operand:VP 0 "register_operand" "=v")
	(unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
		     UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
  "<VI_unit>"
  "vupkhs<VU_char> %0,%1"
  [(set_attr "type" "vecperm")])

;; Sign-unpack the "low" half; mirrors the high-half pattern above.
(define_insn "altivec_vupkls<VU_char>"
  [(set (match_operand:VP 0 "register_operand" "=v")
	(unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
		     UNSPEC_VUNPACK_LO_SIGN))]
  "<VI_unit>"
{
  if (VECTOR_ELT_ORDER_BIG)
    return "vupkls<VU_char> %0,%1";
  else
    return "vupkhs<VU_char> %0,%1";
}
  [(set_attr "type" "vecperm")])

;; Direct form: always the vupkls instruction, no endian correction.
(define_insn "*altivec_vupkls<VU_char>_direct"
  [(set (match_operand:VP 0 "register_operand" "=v")
	(unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
		     UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
  "<VI_unit>"
  "vupkls<VU_char> %0,%1"
  [(set_attr "type" "vecperm")])

;; Pixel unpack, high half; endian-swapped like the sign unpacks.
(define_insn "altivec_vupkhpx"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
	(unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
		     UNSPEC_VUPKHPX))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    return "vupkhpx %0,%1";
  else
    return "vupklpx %0,%1";
}
  [(set_attr "type" "vecperm")])

;; Pixel unpack, low half; endian-swapped like the sign unpacks.
(define_insn "altivec_vupklpx"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
	(unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
		     UNSPEC_VUPKLPX))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    return "vupklpx %0,%1";
  else
    return "vupkhpx %0,%1";
}
  [(set_attr "type" "vecperm")])
2283
;; Compare vectors producing a vector result and a predicate, setting CR6 to
;; indicate a combined status.  Use the symbolic CR6_REGNO (from rs6000.h)
;; rather than the hard-coded register number 74, matching the file's use
;; of VSCR_REGNO elsewhere.
(define_insn "*altivec_vcmpequ<VI_char>_p"
  [(set (reg:CC CR6_REGNO)
	(unspec:CC [(eq:CC (match_operand:VI2 1 "register_operand" "v")
			   (match_operand:VI2 2 "register_operand" "v"))]
		   UNSPEC_PREDICATE))
   (set (match_operand:VI2 0 "register_operand" "=v")
	(eq:VI2 (match_dup 1)
		(match_dup 2)))]
  "<VI_unit>"
  "vcmpequ<VI_char>. %0,%1,%2"
  [(set_attr "type" "veccmpfx")])
2297
;; Signed greater-than predicate compare; CR6 gets the combined status.
;; Use symbolic CR6_REGNO instead of the hard-coded register number 74.
(define_insn "*altivec_vcmpgts<VI_char>_p"
  [(set (reg:CC CR6_REGNO)
	(unspec:CC [(gt:CC (match_operand:VI2 1 "register_operand" "v")
			   (match_operand:VI2 2 "register_operand" "v"))]
		   UNSPEC_PREDICATE))
   (set (match_operand:VI2 0 "register_operand" "=v")
	(gt:VI2 (match_dup 1)
		(match_dup 2)))]
  "<VI_unit>"
  "vcmpgts<VI_char>. %0,%1,%2"
  [(set_attr "type" "veccmpfx")])

;; Unsigned greater-than predicate compare; CR6 gets the combined status.
(define_insn "*altivec_vcmpgtu<VI_char>_p"
  [(set (reg:CC CR6_REGNO)
	(unspec:CC [(gtu:CC (match_operand:VI2 1 "register_operand" "v")
			    (match_operand:VI2 2 "register_operand" "v"))]
		   UNSPEC_PREDICATE))
   (set (match_operand:VI2 0 "register_operand" "=v")
	(gtu:VI2 (match_dup 1)
		 (match_dup 2)))]
  "<VI_unit>"
  "vcmpgtu<VI_char>. %0,%1,%2"
  [(set_attr "type" "veccmpfx")])
2321
;; Float equality predicate compare; CR6 gets the combined status.
;; Use symbolic CR6_REGNO instead of the hard-coded register number 74.
(define_insn "*altivec_vcmpeqfp_p"
  [(set (reg:CC CR6_REGNO)
	(unspec:CC [(eq:CC (match_operand:V4SF 1 "register_operand" "v")
			   (match_operand:V4SF 2 "register_operand" "v"))]
		   UNSPEC_PREDICATE))
   (set (match_operand:V4SF 0 "register_operand" "=v")
	(eq:V4SF (match_dup 1)
		 (match_dup 2)))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vcmpeqfp. %0,%1,%2"
  [(set_attr "type" "veccmp")])

;; Float greater-than predicate compare; CR6 gets the combined status.
(define_insn "*altivec_vcmpgtfp_p"
  [(set (reg:CC CR6_REGNO)
	(unspec:CC [(gt:CC (match_operand:V4SF 1 "register_operand" "v")
			   (match_operand:V4SF 2 "register_operand" "v"))]
		   UNSPEC_PREDICATE))
   (set (match_operand:V4SF 0 "register_operand" "=v")
	(gt:V4SF (match_dup 1)
		 (match_dup 2)))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vcmpgtfp. %0,%1,%2"
  [(set_attr "type" "veccmp")])

;; Float greater-or-equal predicate compare; CR6 gets the combined status.
(define_insn "*altivec_vcmpgefp_p"
  [(set (reg:CC CR6_REGNO)
	(unspec:CC [(ge:CC (match_operand:V4SF 1 "register_operand" "v")
			   (match_operand:V4SF 2 "register_operand" "v"))]
		   UNSPEC_PREDICATE))
   (set (match_operand:V4SF 0 "register_operand" "=v")
	(ge:V4SF (match_dup 1)
		 (match_dup 2)))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vcmpgefp. %0,%1,%2"
  [(set_attr "type" "veccmp")])
2357
;; Bounds-compare predicate (vcmpbfp.); CR6 gets the combined status.
;; Use symbolic CR6_REGNO instead of the hard-coded register number 74.
(define_insn "altivec_vcmpbfp_p"
  [(set (reg:CC CR6_REGNO)
	(unspec:CC [(match_operand:V4SF 1 "register_operand" "v")
		    (match_operand:V4SF 2 "register_operand" "v")]
		   UNSPEC_VCMPBFP))
   (set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_dup 1)
                      (match_dup 2)]
                      UNSPEC_VCMPBFP))]
  "VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)"
  "vcmpbfp. %0,%1,%2"
  [(set_attr "type" "veccmp")])
2370
;; Move to/from the VSCR.  Use the symbolic VSCR_REGNO (as at
;; altivec_vsum2sws_direct) instead of the hard-coded register 110.
(define_insn "altivec_mtvscr"
  [(set (reg:SI VSCR_REGNO)
	(unspec_volatile:SI
	 [(match_operand:V4SI 0 "register_operand" "v")] UNSPECV_MTVSCR))]
  "TARGET_ALTIVEC"
  "mtvscr %0"
  [(set_attr "type" "vecsimple")])

;; Read VSCR into a vector register.
(define_insn "altivec_mfvscr"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
	(unspec_volatile:V8HI [(reg:SI VSCR_REGNO)] UNSPECV_MFVSCR))]
  "TARGET_ALTIVEC"
  "mfvscr %0"
  [(set_attr "type" "vecsimple")])
2385
;; Data-stream control: stop all streams.
(define_insn "altivec_dssall"
  [(unspec_volatile [(const_int 0)] UNSPECV_DSSALL)]
  "TARGET_ALTIVEC"
  "dssall"
  [(set_attr "type" "vecsimple")])

;; Data-stream control: stop the stream named by the immediate.
(define_insn "altivec_dss"
  [(unspec_volatile [(match_operand:QI 0 "immediate_operand" "i")]
		    UNSPECV_DSS)]
  "TARGET_ALTIVEC"
  "dss %0"
  [(set_attr "type" "vecsimple")])

;; Data-stream touch (prefetch hint).  Operand 0 must be a Pmode address
;; base, hence the GET_MODE check in the condition.
(define_insn "altivec_dst"
  [(unspec [(match_operand 0 "register_operand" "b")
	    (match_operand:SI 1 "register_operand" "r")
	    (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DST)]
  "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
  "dst %0,%1,%2"
  [(set_attr "type" "vecsimple")])

;; Data-stream touch, transient variant.
(define_insn "altivec_dstt"
  [(unspec [(match_operand 0 "register_operand" "b")
	    (match_operand:SI 1 "register_operand" "r")
	    (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTT)]
  "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
  "dstt %0,%1,%2"
  [(set_attr "type" "vecsimple")])

;; Data-stream touch for store.
(define_insn "altivec_dstst"
  [(unspec [(match_operand 0 "register_operand" "b")
	    (match_operand:SI 1 "register_operand" "r")
	    (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTST)]
  "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
  "dstst %0,%1,%2"
  [(set_attr "type" "vecsimple")])

;; Data-stream touch for store, transient variant.
(define_insn "altivec_dststt"
  [(unspec [(match_operand 0 "register_operand" "b")
	    (match_operand:SI 1 "register_operand" "r")
	    (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTSTT)]
  "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
  "dststt %0,%1,%2"
  [(set_attr "type" "vecsimple")])
2430
;; Load vector for shift left (permute-control mask).  For big-endian
;; element order the instruction result is used directly; otherwise the
;; raw mask is permuted through itself with the identity selector
;; 0..15 so the elements appear in the order the rest of the RTL expects.
(define_expand "altivec_lvsl"
  [(use (match_operand:V16QI 0 "register_operand" ""))
   (use (match_operand:V16QI 1 "memory_operand" ""))]
  "TARGET_ALTIVEC"
{
  if (!VECTOR_ELT_ORDER_BIG)
    {
      int elt;
      rtx sel[16];
      rtx raw_mask = gen_reg_rtx (V16QImode);

      emit_insn (gen_altivec_lvsl_direct (raw_mask, operands[1]));
      for (elt = 0; elt < 16; ++elt)
	sel[elt] = GEN_INT (elt);

      rtx idx = force_reg (V16QImode,
			   gen_rtx_CONST_VECTOR (V16QImode,
						 gen_rtvec_v (16, sel)));
      rtx perm = gen_rtx_UNSPEC (V16QImode,
				 gen_rtvec (3, raw_mask, raw_mask, idx),
				 UNSPEC_VPERM);
      emit_insn (gen_rtx_SET (operands[0], perm));
    }
  else
    emit_insn (gen_altivec_lvsl_direct (operands[0], operands[1]));
  DONE;
})
2454
;; Raw lvsl with no endian correction; used by the expander above.
(define_insn "altivec_lvsl_direct"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
	(unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
		      UNSPEC_LVSL))]
  "TARGET_ALTIVEC"
  "lvsl %0,%y1"
  [(set_attr "type" "vecload")])
2462
;; Load vector for shift right (permute-control mask).  Mirrors
;; altivec_lvsl: in little-endian element order the raw mask is permuted
;; through itself with the identity selector 0..15.
(define_expand "altivec_lvsr"
  [(use (match_operand:V16QI 0 "register_operand" ""))
   (use (match_operand:V16QI 1 "memory_operand" ""))]
  "TARGET_ALTIVEC"
{
  if (!VECTOR_ELT_ORDER_BIG)
    {
      int elt;
      rtx sel[16];
      rtx raw_mask = gen_reg_rtx (V16QImode);

      emit_insn (gen_altivec_lvsr_direct (raw_mask, operands[1]));
      for (elt = 0; elt < 16; ++elt)
	sel[elt] = GEN_INT (elt);

      rtx idx = force_reg (V16QImode,
			   gen_rtx_CONST_VECTOR (V16QImode,
						 gen_rtvec_v (16, sel)));
      rtx perm = gen_rtx_UNSPEC (V16QImode,
				 gen_rtvec (3, raw_mask, raw_mask, idx),
				 UNSPEC_VPERM);
      emit_insn (gen_rtx_SET (operands[0], perm));
    }
  else
    emit_insn (gen_altivec_lvsr_direct (operands[0], operands[1]));
  DONE;
})
2486
;; Raw lvsr with no endian correction; used by the expander above.
(define_insn "altivec_lvsr_direct"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
	(unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
		      UNSPEC_LVSR))]
  "TARGET_ALTIVEC"
  "lvsr %0,%y1"
  [(set_attr "type" "vecload")])
2494
;; Build a realignment permute mask for an unaligned load: emit lvsr on
;; the negation of the load address.
(define_expand "build_vector_mask_for_load"
  [(set (match_operand:V16QI 0 "register_operand" "")
	(unspec:V16QI [(match_operand 1 "memory_operand" "")] UNSPEC_LVSR))]
  "TARGET_ALTIVEC"
  "
{
  rtx addr;
  rtx temp;

  gcc_assert (GET_CODE (operands[1]) == MEM);

  addr = XEXP (operands[1], 0);
  temp = gen_reg_rtx (GET_MODE (addr));
  /* lvsr of (-addr) produces the shift mask used to realign the load.  */
  emit_insn (gen_rtx_SET (temp, gen_rtx_NEG (GET_MODE (addr), addr)));
  emit_insn (gen_altivec_lvsr (operands[0],
			       replace_equiv_address (operands[1], temp)));
  DONE;
}")
2513
;; Parallel some of the LVE* and STV* patterns with unspecs because some
;; have identical RTL but different instructions -- and GCC gets confused.
2516
;; Load vector element (lvebx/lvehx/lvewx).  For LE with -maltivec=be the
;; access is rewritten by altivec_expand_lvx_be.
(define_expand "altivec_lve<VI_char>x"
  [(parallel
    [(set (match_operand:VI 0 "register_operand" "=v")
	  (match_operand:VI 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_LVE)])]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_LVE);
      DONE;
    }
})

;; Matcher emitting the actual element load.
(define_insn "*altivec_lve<VI_char>x_internal"
  [(parallel
    [(set (match_operand:VI 0 "register_operand" "=v")
	  (match_operand:VI 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_LVE)])]
  "TARGET_ALTIVEC"
  "lve<VI_char>x %0,%y1"
  [(set_attr "type" "vecload")])

;; V4SF element load uses the word-element instruction lvewx.
(define_insn "*altivec_lvesfx"
  [(parallel
    [(set (match_operand:V4SF 0 "register_operand" "=v")
	  (match_operand:V4SF 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_LVE)])]
  "TARGET_ALTIVEC"
  "lvewx %0,%y1"
  [(set_attr "type" "vecload")])
2548
;; Load vector indexed LRU (lvxl).  For LE with -maltivec=be the access is
;; rewritten by altivec_expand_lvx_be.
(define_expand "altivec_lvxl_<mode>"
  [(parallel
    [(set (match_operand:VM2 0 "register_operand" "=v")
	  (match_operand:VM2 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_SET_VSCR);
      DONE;
    }
})

;; Matcher emitting the actual lvxl.
(define_insn "*altivec_lvxl_<mode>_internal"
  [(parallel
    [(set (match_operand:VM2 0 "register_operand" "=v")
	  (match_operand:VM2 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
  "TARGET_ALTIVEC"
  "lvxl %0,%y1"
  [(set_attr "type" "vecload")])

;; Load vector indexed (lvx).  Same LE -maltivec=be rewriting.
(define_expand "altivec_lvx_<mode>"
  [(parallel
    [(set (match_operand:VM2 0 "register_operand" "=v")
	  (match_operand:VM2 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_LVX)])]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_LVX);
      DONE;
    }
})

;; Matcher emitting the actual lvx.
(define_insn "altivec_lvx_<mode>_internal"
  [(parallel
    [(set (match_operand:VM2 0 "register_operand" "=v")
	  (match_operand:VM2 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_LVX)])]
  "TARGET_ALTIVEC"
  "lvx %0,%y1"
  [(set_attr "type" "vecload")])
2594
;; Store vector indexed (stvx).  For LE with -maltivec=be the access is
;; rewritten by altivec_expand_stvx_be.
(define_expand "altivec_stvx_<mode>"
  [(parallel
    [(set (match_operand:VM2 0 "memory_operand" "=Z")
	  (match_operand:VM2 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVX)])]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_stvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVX);
      DONE;
    }
})

;; Matcher emitting the actual stvx.
(define_insn "altivec_stvx_<mode>_internal"
  [(parallel
    [(set (match_operand:VM2 0 "memory_operand" "=Z")
	  (match_operand:VM2 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVX)])]
  "TARGET_ALTIVEC"
  "stvx %1,%y0"
  [(set_attr "type" "vecstore")])

;; Store vector indexed LRU (stvxl).  Same LE -maltivec=be rewriting.
(define_expand "altivec_stvxl_<mode>"
  [(parallel
    [(set (match_operand:VM2 0 "memory_operand" "=Z")
	  (match_operand:VM2 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVXL)])]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_stvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVXL);
      DONE;
    }
})

;; Matcher emitting the actual stvxl.
(define_insn "*altivec_stvxl_<mode>_internal"
  [(parallel
    [(set (match_operand:VM2 0 "memory_operand" "=Z")
	  (match_operand:VM2 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVXL)])]
  "TARGET_ALTIVEC"
  "stvxl %1,%y0"
  [(set_attr "type" "vecstore")])
2640
;; Store vector element (stvebx/stvehx/stvewx).  For LE with -maltivec=be
;; the access is rewritten by altivec_expand_stvex_be.
(define_expand "altivec_stve<VI_char>x"
  [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
	(unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_stvex_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVE);
      DONE;
    }
})

;; Matcher emitting the actual element store.
(define_insn "*altivec_stve<VI_char>x_internal"
  [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
	(unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
  "TARGET_ALTIVEC"
  "stve<VI_char>x %1,%y0"
  [(set_attr "type" "vecstore")])

;; V4SF element store uses the word-element instruction stvewx.
(define_insn "*altivec_stvesfx"
  [(set (match_operand:SF 0 "memory_operand" "=Z")
	(unspec:SF [(match_operand:V4SF 1 "register_operand" "v")] UNSPEC_STVE))]
  "TARGET_ALTIVEC"
  "stvewx %1,%y0"
  [(set_attr "type" "vecstore")])
2666
;; Generate
;;    xxlxor/vxor SCRATCH0,SCRATCH0,SCRATCH0
;;    vsubu?m SCRATCH2,SCRATCH1,%1
;;    vmaxs? %0,%1,SCRATCH2"
;; Integer abs: abs(x) = smax (x, 0 - x), using modular subtraction.
(define_expand "abs<mode>2"
  [(set (match_dup 2) (match_dup 3))
   (set (match_dup 4)
        (minus:VI2 (match_dup 2)
		   (match_operand:VI2 1 "register_operand" "v")))
   (set (match_operand:VI2 0 "register_operand" "=v")
        (smax:VI2 (match_dup 1) (match_dup 4)))]
  "<VI_unit>"
{
  int i, n_elt = GET_MODE_NUNITS (<MODE>mode);
  rtvec v = rtvec_alloc (n_elt);

  /* Create an all 0 constant.  */
  for (i = 0; i < n_elt; ++i)
    RTVEC_ELT (v, i) = const0_rtx;

  operands[2] = gen_reg_rtx (<MODE>mode);
  operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, v);
  operands[4] = gen_reg_rtx (<MODE>mode);
})
2691
;; V4SF absolute value: build the sign-bit mask 0x80000000 in each word
;; (splat -1, then shift each word left by 31 via a shift count of -1,
;; which is masked to 31), then clear the sign bits with vandc.
;; Generate
;;    vspltisw SCRATCH1,-1
;;    vslw SCRATCH2,SCRATCH1,SCRATCH1
;;    vandc %0,%1,SCRATCH2
(define_expand "altivec_absv4sf2"
  [(set (match_dup 2)
	(vec_duplicate:V4SI (const_int -1)))
   (set (match_dup 3)
        (ashift:V4SI (match_dup 2) (match_dup 2)))
   (set (match_operand:V4SF 0 "register_operand" "=v")
        (and:V4SF (not:V4SF (subreg:V4SF (match_dup 3) 0))
                  (match_operand:V4SF 1 "register_operand" "v")))]
  "TARGET_ALTIVEC"
{
  operands[2] = gen_reg_rtx (V4SImode);
  operands[3] = gen_reg_rtx (V4SImode);
})
2709
;; Saturating integer absolute value: abs(x) = smax (x, 0 -sat x).  The
;; saturating subtract also sets VSCR[SAT] (modeled as reg 110 here).
;; Generate
;;    vspltis? SCRATCH0,0
;;    vsubs?s SCRATCH2,SCRATCH1,%1
;;    vmaxs? %0,%1,SCRATCH2"
(define_expand "altivec_abss_<mode>"
  [(set (match_dup 2) (vec_duplicate:VI (const_int 0)))
   (parallel [(set (match_dup 3)
		   (unspec:VI [(match_dup 2)
			       (match_operand:VI 1 "register_operand" "v")]
			      UNSPEC_VSUBS))
              (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))])
   (set (match_operand:VI 0 "register_operand" "=v")
        (smax:VI (match_dup 1) (match_dup 3)))]
  "TARGET_ALTIVEC"
{
  operands[2] = gen_reg_rtx (GET_MODE (operands[0]));
  operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
})
2728
;; Reduce a short-element vector (V16QI/V8HI) by addition to a scalar:
;; vsum4s?s partially sums into V4SI words, vsumsws folds those into one
;; word, and the result element is extracted (element position depends
;; on endianness).
(define_expand "reduc_plus_scal_<mode>"
  [(set (match_operand:<VI_scalar> 0 "register_operand" "=v")
        (unspec:VIshort [(match_operand:VIshort 1 "register_operand" "v")]
			UNSPEC_REDUC_PLUS))]
  "TARGET_ALTIVEC"
{
  rtx vzero = gen_reg_rtx (V4SImode);
  rtx vtmp1 = gen_reg_rtx (V4SImode);
  rtx vtmp2 = gen_reg_rtx (<MODE>mode);
  rtx dest = gen_lowpart (V4SImode, vtmp2);
  /* vsumsws leaves the sum in the last element (BE numbering).  */
  int elt = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (<MODE>mode) - 1 : 0;

  emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
  emit_insn (gen_altivec_vsum4s<VI_char>s (vtmp1, operands[1], vzero));
  emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
  rs6000_expand_vector_extract (operands[0], vtmp2, elt);
  DONE;
})
2747
;; ISA 3.0 (Power9) single-instruction vector negate (vnegw/vnegd).
(define_insn "*p9_neg<mode>2"
  [(set (match_operand:VNEG 0 "altivec_register_operand" "=v")
	(neg:VNEG (match_operand:VNEG 1 "altivec_register_operand" "v")))]
  "TARGET_P9_VECTOR"
  "vneg<VI_char> %0,%1"
  [(set_attr "type" "vecsimple")])
2754
;; Vector negate.  On Power9, V4SI/V2DI fall through to the vneg insn
;; above; everything else is open-coded as 0 - x.
(define_expand "neg<mode>2"
  [(set (match_operand:VI2 0 "register_operand" "")
	(neg:VI2 (match_operand:VI2 1 "register_operand" "")))]
  "<VI_unit>"
{
  if (!TARGET_P9_VECTOR || (<MODE>mode != V4SImode && <MODE>mode != V2DImode))
    {
      rtx vzero;

      vzero = gen_reg_rtx (GET_MODE (operands[0]));
      emit_move_insn (vzero, CONST0_RTX (<MODE>mode));
      emit_insn (gen_sub<mode>3 (operands[0], vzero, operands[1]));
      DONE;
    }
})
2770
;; Unsigned dot product with accumulate: op0 = op3 + sum over widened
;; products of op1 and op2; maps directly onto vmsumu{b,h}m.
(define_expand "udot_prod<mode>"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
                   (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
                                 (match_operand:VIshort 2 "register_operand" "v")]
                                UNSPEC_VMSUMU)))]
  "TARGET_ALTIVEC"
  "
{
  emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], operands[2], operands[3]));
  DONE;
}")
2783
;; Signed halfword dot product with accumulate, via vmsumshm.
(define_expand "sdot_prodv8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
                   (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                                 (match_operand:V8HI 2 "register_operand" "v")]
                                UNSPEC_VMSUMSHM)))]
  "TARGET_ALTIVEC"
  "
{
  emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], operands[2], operands[3]));
  DONE;
}")
2796
;; Widening sums: sum the widened elements of op1 into op2.  Each is
;; implemented as a multiply-sum against a splatted vector of ones.

;; Unsigned: vmsumu{b,h}m with a vector of 1s as the multiplier.
(define_expand "widen_usum<mode>3"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
                   (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")]
                                UNSPEC_VMSUMU)))]
  "TARGET_ALTIVEC"
  "
{
  rtx vones = gen_reg_rtx (GET_MODE (operands[1]));

  emit_insn (gen_altivec_vspltis<VI_char> (vones, const1_rtx));
  emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], vones, operands[2]));
  DONE;
}")

;; Signed byte widening sum: vmsummbm (mixed-sign byte multiply-sum)
;; with an unsigned all-ones multiplicand.
(define_expand "widen_ssumv16qi3"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
                   (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")]
                                UNSPEC_VMSUMM)))]
  "TARGET_ALTIVEC"
  "
{
  rtx vones = gen_reg_rtx (V16QImode);

  emit_insn (gen_altivec_vspltisb (vones, const1_rtx));
  emit_insn (gen_altivec_vmsummbm (operands[0], operands[1], vones, operands[2]));
  DONE;
}")

;; Signed halfword widening sum, via vmsumshm.
(define_expand "widen_ssumv8hi3"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
                   (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
                                UNSPEC_VMSUMSHM)))]
  "TARGET_ALTIVEC"
  "
{
  rtx vones = gen_reg_rtx (V8HImode);

  emit_insn (gen_altivec_vspltish (vones, const1_rtx));
  emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], vones, operands[2]));
  DONE;
}")
2841
;; Signed unpack high/low: simply expose the *_DIRECT unspec patterns,
;; which are matched by the vupkhs?/vupkls? insns defined elsewhere.
(define_expand "vec_unpacks_hi_<VP_small_lc>"
  [(set (match_operand:VP 0 "register_operand" "=v")
        (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
		   UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
  "<VI_unit>"
  "")

(define_expand "vec_unpacks_lo_<VP_small_lc>"
  [(set (match_operand:VP 0 "register_operand" "=v")
        (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
		   UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
  "<VI_unit>"
  "")
2855
;; Mixed-mode vperm patterns used by the unsigned-unpack expanders below.
;; Second alternative uses ISA 3.0 xxperm, which overwrites its permute
;; control input; the "0" constraint ties operand 2 to the destination.
(define_insn "vperm_v8hiv4si"
  [(set (match_operand:V4SI 0 "register_operand" "=v,?wo")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v,wo")
		      (match_operand:V4SI 2 "register_operand" "v,0")
		      (match_operand:V16QI 3 "register_operand" "v,wo")]
                  UNSPEC_VPERMSI))]
  "TARGET_ALTIVEC"
  "@
   vperm %0,%1,%2,%3
   xxperm %x0,%x1,%x3"
  [(set_attr "type" "vecperm")
   (set_attr "length" "4")])

(define_insn "vperm_v16qiv8hi"
  [(set (match_operand:V8HI 0 "register_operand" "=v,?wo")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v,wo")
		      (match_operand:V8HI 2 "register_operand" "v,0")
		      (match_operand:V16QI 3 "register_operand" "v,wo")]
                  UNSPEC_VPERMHI))]
  "TARGET_ALTIVEC"
  "@
   vperm %0,%1,%2,%3
   xxperm %x0,%x1,%x3"
  [(set_attr "type" "vecperm")
   (set_attr "length" "4")])
2881
2882
;; Unsigned unpack high V16QI -> V8HI: permute the high input bytes with
;; a zero vector so each result halfword is a zero-extended byte.  Mask
;; indices >= 16 select bytes from the zero vector; the two branches of
;; each ternary give the BE and LE byte numbering respectively.
(define_expand "vec_unpacku_hi_v16qi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
                     UNSPEC_VUPKHUB))]
  "TARGET_ALTIVEC"
  "
{
  rtx vzero = gen_reg_rtx (V8HImode);
  rtx mask = gen_reg_rtx (V16QImode);
  rtvec v = rtvec_alloc (16);
  bool be = BYTES_BIG_ENDIAN;

  emit_insn (gen_altivec_vspltish (vzero, const0_rtx));

  RTVEC_ELT (v,  0) = gen_rtx_CONST_INT (QImode, be ? 16 :  7);
  RTVEC_ELT (v,  1) = gen_rtx_CONST_INT (QImode, be ?  0 : 16);
  RTVEC_ELT (v,  2) = gen_rtx_CONST_INT (QImode, be ? 16 :  6);
  RTVEC_ELT (v,  3) = gen_rtx_CONST_INT (QImode, be ?  1 : 16);
  RTVEC_ELT (v,  4) = gen_rtx_CONST_INT (QImode, be ? 16 :  5);
  RTVEC_ELT (v,  5) = gen_rtx_CONST_INT (QImode, be ?  2 : 16);
  RTVEC_ELT (v,  6) = gen_rtx_CONST_INT (QImode, be ? 16 :  4);
  RTVEC_ELT (v,  7) = gen_rtx_CONST_INT (QImode, be ?  3 : 16);
  RTVEC_ELT (v,  8) = gen_rtx_CONST_INT (QImode, be ? 16 :  3);
  RTVEC_ELT (v,  9) = gen_rtx_CONST_INT (QImode, be ?  4 : 16);
  RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 :  2);
  RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ?  5 : 16);
  RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 :  1);
  RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ?  6 : 16);
  RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 :  0);
  RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ?  7 : 16);

  emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
  emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
  DONE;
}")
2918
;; Unsigned unpack high V8HI -> V4SI: permute the high input halfwords
;; with a zero vector so each result word is a zero-extended halfword.
;; Indices 16/17 select the two zero bytes of each zero halfword.
(define_expand "vec_unpacku_hi_v8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
                     UNSPEC_VUPKHUH))]
  "TARGET_ALTIVEC"
  "
{
  rtx vzero = gen_reg_rtx (V4SImode);
  rtx mask = gen_reg_rtx (V16QImode);
  rtvec v = rtvec_alloc (16);
  bool be = BYTES_BIG_ENDIAN;

  emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));

  RTVEC_ELT (v,  0) = gen_rtx_CONST_INT (QImode, be ? 16 :  7);
  RTVEC_ELT (v,  1) = gen_rtx_CONST_INT (QImode, be ? 17 :  6);
  RTVEC_ELT (v,  2) = gen_rtx_CONST_INT (QImode, be ?  0 : 17);
  RTVEC_ELT (v,  3) = gen_rtx_CONST_INT (QImode, be ?  1 : 16);
  RTVEC_ELT (v,  4) = gen_rtx_CONST_INT (QImode, be ? 16 :  5);
  RTVEC_ELT (v,  5) = gen_rtx_CONST_INT (QImode, be ? 17 :  4);
  RTVEC_ELT (v,  6) = gen_rtx_CONST_INT (QImode, be ?  2 : 17);
  RTVEC_ELT (v,  7) = gen_rtx_CONST_INT (QImode, be ?  3 : 16);
  RTVEC_ELT (v,  8) = gen_rtx_CONST_INT (QImode, be ? 16 :  3);
  RTVEC_ELT (v,  9) = gen_rtx_CONST_INT (QImode, be ? 17 :  2);
  RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ?  4 : 17);
  RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ?  5 : 16);
  RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 :  1);
  RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 :  0);
  RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ?  6 : 17);
  RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ?  7 : 16);

  emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
  emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
  DONE;
}")
2954
;; Unsigned unpack low V16QI -> V8HI: like vec_unpacku_hi_v16qi but
;; selecting the low eight input bytes (indices 8..15).
(define_expand "vec_unpacku_lo_v16qi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
                     UNSPEC_VUPKLUB))]
  "TARGET_ALTIVEC"
  "
{
  rtx vzero = gen_reg_rtx (V8HImode);
  rtx mask = gen_reg_rtx (V16QImode);
  rtvec v = rtvec_alloc (16);
  bool be = BYTES_BIG_ENDIAN;

  emit_insn (gen_altivec_vspltish (vzero, const0_rtx));

  RTVEC_ELT (v,  0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
  RTVEC_ELT (v,  1) = gen_rtx_CONST_INT (QImode, be ?  8 : 16);
  RTVEC_ELT (v,  2) = gen_rtx_CONST_INT (QImode, be ? 16 : 14);
  RTVEC_ELT (v,  3) = gen_rtx_CONST_INT (QImode, be ?  9 : 16);
  RTVEC_ELT (v,  4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
  RTVEC_ELT (v,  5) = gen_rtx_CONST_INT (QImode, be ? 10 : 16);
  RTVEC_ELT (v,  6) = gen_rtx_CONST_INT (QImode, be ? 16 : 12);
  RTVEC_ELT (v,  7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
  RTVEC_ELT (v,  8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
  RTVEC_ELT (v,  9) = gen_rtx_CONST_INT (QImode, be ? 12 : 16);
  RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 10);
  RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
  RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 :  9);
  RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 14 : 16);
  RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 :  8);
  RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);

  emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
  emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
  DONE;
}")
2990
;; Unsigned unpack low V8HI -> V4SI: like vec_unpacku_hi_v8hi but
;; selecting the low four input halfwords (byte indices 8..15).
(define_expand "vec_unpacku_lo_v8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
                     UNSPEC_VUPKLUH))]
  "TARGET_ALTIVEC"
  "
{
  rtx vzero = gen_reg_rtx (V4SImode);
  rtx mask = gen_reg_rtx (V16QImode);
  rtvec v = rtvec_alloc (16);
  bool be = BYTES_BIG_ENDIAN;

  emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));

  RTVEC_ELT (v,  0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
  RTVEC_ELT (v,  1) = gen_rtx_CONST_INT (QImode, be ? 17 : 14);
  RTVEC_ELT (v,  2) = gen_rtx_CONST_INT (QImode, be ?  8 : 17);
  RTVEC_ELT (v,  3) = gen_rtx_CONST_INT (QImode, be ?  9 : 16);
  RTVEC_ELT (v,  4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
  RTVEC_ELT (v,  5) = gen_rtx_CONST_INT (QImode, be ? 17 : 12);
  RTVEC_ELT (v,  6) = gen_rtx_CONST_INT (QImode, be ? 10 : 17);
  RTVEC_ELT (v,  7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
  RTVEC_ELT (v,  8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
  RTVEC_ELT (v,  9) = gen_rtx_CONST_INT (QImode, be ? 17 : 10);
  RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 12 : 17);
  RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
  RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 :  9);
  RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 :  8);
  RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 14 : 17);
  RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);

  emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
  emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
  DONE;
}")
3026
;; Widening unsigned byte multiplies: compute even- and odd-element
;; products, then merge (vmrgh/vmrgl) to produce the high or low half in
;; element order.  On little-endian, vmule/vmulo swap roles and the merge
;; operands are exchanged so the result matches LE element numbering.

;; High half of unsigned V16QI widening multiply.
(define_expand "vec_widen_umult_hi_v16qi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V16QI 2 "register_operand" "v")]
                     UNSPEC_VMULWHUB))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V8HImode);
  rtx vo = gen_reg_rtx (V8HImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
    }
  DONE;
}")

;; Low half of unsigned V16QI widening multiply.
(define_expand "vec_widen_umult_lo_v16qi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V16QI 2 "register_operand" "v")]
                     UNSPEC_VMULWLUB))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V8HImode);
  rtx vo = gen_reg_rtx (V8HImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
    }
  DONE;
}")
3078
;; Widening signed byte multiplies; same even/odd multiply + merge
;; scheme as the unsigned versions above, using vmulesb/vmulosb.

;; High half of signed V16QI widening multiply.
(define_expand "vec_widen_smult_hi_v16qi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V16QI 2 "register_operand" "v")]
                     UNSPEC_VMULWHSB))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V8HImode);
  rtx vo = gen_reg_rtx (V8HImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
    }
  DONE;
}")

;; Low half of signed V16QI widening multiply.
(define_expand "vec_widen_smult_lo_v16qi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V16QI 2 "register_operand" "v")]
                     UNSPEC_VMULWLSB))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V8HImode);
  rtx vo = gen_reg_rtx (V8HImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
    }
  DONE;
}")
3130
;; Widening unsigned halfword multiplies; even/odd products merged with
;; vmrghw/vmrglw, operand order swapped on little-endian.

;; High half of unsigned V8HI widening multiply.
(define_expand "vec_widen_umult_hi_v8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
                     UNSPEC_VMULWHUH))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V4SImode);
  rtx vo = gen_reg_rtx (V4SImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
    }
  DONE;
}")

;; Low half of unsigned V8HI widening multiply.
(define_expand "vec_widen_umult_lo_v8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
                     UNSPEC_VMULWLUH))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V4SImode);
  rtx vo = gen_reg_rtx (V4SImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
    }
  DONE;
}")
3182
;; Widening signed halfword multiplies, via vmulesh/vmulosh plus a
;; word merge; operand order swapped on little-endian.

;; High half of signed V8HI widening multiply.
(define_expand "vec_widen_smult_hi_v8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
                     UNSPEC_VMULWHSH))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V4SImode);
  rtx vo = gen_reg_rtx (V4SImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
    }
  DONE;
}")

;; Low half of signed V8HI widening multiply.
(define_expand "vec_widen_smult_lo_v8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
                     UNSPEC_VMULWLSH))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V4SImode);
  rtx vo = gen_reg_rtx (V4SImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
    }
  DONE;
}")
3234
;; Truncating pack of two VP vectors into one narrower vector; matched
;; by the modulo (non-saturating) vpku?um insn patterns elsewhere.
(define_expand "vec_pack_trunc_<mode>"
  [(set (match_operand:<VP_small> 0 "register_operand" "=v")
        (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
			    (match_operand:VP 2 "register_operand" "v")]
                      UNSPEC_VPACK_UNS_UNS_MOD))]
  "<VI_unit>"
  "")
3242
;; V16QI multiply: compute even and odd widening byte products, then use
;; vperm to gather the low byte of each halfword product back into a
;; single V16QI result (permute indices depend on endianness).
(define_expand "mulv16qi3"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (mult:V16QI (match_operand:V16QI 1 "register_operand" "v")
                    (match_operand:V16QI 2 "register_operand" "v")))]
  "TARGET_ALTIVEC"
  "
{
  rtx even = gen_reg_rtx (V8HImode);
  rtx odd = gen_reg_rtx (V8HImode);
  rtx mask = gen_reg_rtx (V16QImode);
  rtvec v = rtvec_alloc (16);
  int i;

  /* Even result elements come from the 'even' products (first vperm
     input, indices 0-15), odd from 'odd' (second input, 16-31).  */
  for (i = 0; i < 8; ++i) {
    RTVEC_ELT (v, 2 * i)
     = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 1 : 31 - 2 * i);
    RTVEC_ELT (v, 2 * i + 1)
     = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 17 : 15 - 2 * i);
  }

  emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
  emit_insn (gen_altivec_vmulesb (even, operands[1], operands[2]));
  emit_insn (gen_altivec_vmulosb (odd, operands[1], operands[2]));
  emit_insn (gen_altivec_vperm_v8hiv16qi (operands[0], even, odd, mask));
  DONE;
}")
3269
;; V4SF negate: build 0x80000000 in every word (splat -1, then shift each
;; word left by 31 -- the shift count -1 is masked to 31) and XOR it in
;; to flip the sign bits.
(define_expand "altivec_negv4sf2"
  [(use (match_operand:V4SF 0 "register_operand" ""))
   (use (match_operand:V4SF 1 "register_operand" ""))]
  "TARGET_ALTIVEC"
  "
{
  rtx neg0;

  /* Generate [-0.0, -0.0, -0.0, -0.0].  */
  neg0 = gen_reg_rtx (V4SImode);
  emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
  emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));

  /* XOR */
  emit_insn (gen_xorv4sf3 (operands[0],
			   gen_lowpart (V4SFmode, neg0), operands[1]));

  DONE;
}")
3289
;; Vector SIMD PEM v2.06c defines LVLX, LVLXL, LVRX, LVRXL,
;; STVLX, STVLXL, STVRX, STVRXL are available only on Cell.
;; These load the left/right part of a possibly-unaligned vector; the
;; "l" variants add the LRU (transient) cache hint.
(define_insn "altivec_lvlx"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
		      UNSPEC_LVLX))]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "lvlx %0,%y1"
  [(set_attr "type" "vecload")])

(define_insn "altivec_lvlxl"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
		      UNSPEC_LVLXL))]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "lvlxl %0,%y1"
  [(set_attr "type" "vecload")])

(define_insn "altivec_lvrx"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
		      UNSPEC_LVRX))]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "lvrx %0,%y1"
  [(set_attr "type" "vecload")])

(define_insn "altivec_lvrxl"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
		      UNSPEC_LVRXL))]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "lvrxl %0,%y1"
  [(set_attr "type" "vecload")])
3323
;; Cell-only unaligned left/right vector stores (counterparts of the
;; lvlx/lvrx loads above); "l" variants carry the LRU cache hint.
(define_insn "altivec_stvlx"
  [(parallel
    [(set (match_operand:V16QI 0 "memory_operand" "=Z")
	  (match_operand:V16QI 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVLX)])]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "stvlx %1,%y0"
  [(set_attr "type" "vecstore")])

(define_insn "altivec_stvlxl"
  [(parallel
    [(set (match_operand:V16QI 0 "memory_operand" "=Z")
	  (match_operand:V16QI 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVLXL)])]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "stvlxl %1,%y0"
  [(set_attr "type" "vecstore")])

(define_insn "altivec_stvrx"
  [(parallel
    [(set (match_operand:V16QI 0 "memory_operand" "=Z")
	  (match_operand:V16QI 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVRX)])]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "stvrx %1,%y0"
  [(set_attr "type" "vecstore")])

(define_insn "altivec_stvrxl"
  [(parallel
    [(set (match_operand:V16QI 0 "memory_operand" "=Z")
	  (match_operand:V16QI 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVRXL)])]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "stvrxl %1,%y0"
  [(set_attr "type" "vecstore")])
3359
;; Unpack V8HI halfwords (high or low half, signed or unsigned) to V4SI,
;; then convert to V4SF with vcfsx/vcfux (scale factor 0).

;; Signed high half -> float.
(define_expand "vec_unpacks_float_hi_v8hi"
 [(set (match_operand:V4SF 0 "register_operand" "")
        (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
                     UNSPEC_VUPKHS_V4SF))]
  "TARGET_ALTIVEC"
  "
{
  rtx tmp = gen_reg_rtx (V4SImode);

  emit_insn (gen_vec_unpacks_hi_v8hi (tmp, operands[1]));
  emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
  DONE;
}")

;; Signed low half -> float.
(define_expand "vec_unpacks_float_lo_v8hi"
 [(set (match_operand:V4SF 0 "register_operand" "")
        (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
                     UNSPEC_VUPKLS_V4SF))]
  "TARGET_ALTIVEC"
  "
{
  rtx tmp = gen_reg_rtx (V4SImode);

  emit_insn (gen_vec_unpacks_lo_v8hi (tmp, operands[1]));
  emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
  DONE;
}")

;; Unsigned high half -> float.
(define_expand "vec_unpacku_float_hi_v8hi"
 [(set (match_operand:V4SF 0 "register_operand" "")
        (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
                     UNSPEC_VUPKHU_V4SF))]
  "TARGET_ALTIVEC"
  "
{
  rtx tmp = gen_reg_rtx (V4SImode);

  emit_insn (gen_vec_unpacku_hi_v8hi (tmp, operands[1]));
  emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
  DONE;
}")

;; Unsigned low half -> float.
(define_expand "vec_unpacku_float_lo_v8hi"
 [(set (match_operand:V4SF 0 "register_operand" "")
        (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
                     UNSPEC_VUPKLU_V4SF))]
  "TARGET_ALTIVEC"
  "
{
  rtx tmp = gen_reg_rtx (V4SImode);

  emit_insn (gen_vec_unpacku_lo_v8hi (tmp, operands[1]));
  emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
  DONE;
}")
3415

;; Power8/power9 vector instructions encoded as Altivec instructions

;; Vector count leading zeros
(define_insn "*p8v_clz<mode>2"
  [(set (match_operand:VI2 0 "register_operand" "=v")
	(clz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
  "TARGET_P8_VECTOR"
  "vclz<wd> %0,%1"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])

;; Vector absolute difference unsigned (expander; matched by the insn
;; below)
(define_expand "vadu<mode>3"
  [(set (match_operand:VI 0 "register_operand")
        (unspec:VI [(match_operand:VI 1 "register_operand")
		    (match_operand:VI 2 "register_operand")]
         UNSPEC_VADU))]
  "TARGET_P9_VECTOR")

;; Vector absolute difference unsigned (ISA 3.0 vabsdu{b,h,w})
(define_insn "*p9_vadu<mode>3"
  [(set (match_operand:VI 0 "register_operand" "=v")
        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
		    (match_operand:VI 2 "register_operand" "v")]
         UNSPEC_VADU))]
  "TARGET_P9_VECTOR"
  "vabsdu<wd> %0,%1,%2"
  [(set_attr "type" "vecsimple")])

;; Vector count trailing zeros
(define_insn "*p9v_ctz<mode>2"
  [(set (match_operand:VI2 0 "register_operand" "=v")
	(ctz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
  "TARGET_P9_VECTOR"
  "vctz<wd> %0,%1"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])

;; Vector population count
(define_insn "*p8v_popcount<mode>2"
  [(set (match_operand:VI2 0 "register_operand" "=v")
        (popcount:VI2 (match_operand:VI2 1 "register_operand" "v")))]
  "TARGET_P8_VECTOR"
  "vpopcnt<wd> %0,%1"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])

;; Vector parity
(define_insn "*p9v_parity<mode>2"
  [(set (match_operand:VParity 0 "register_operand" "=v")
        (parity:VParity (match_operand:VParity 1 "register_operand" "v")))]
  "TARGET_P9_VECTOR"
  "vprtyb<wd> %0,%1"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])

;; Vector Gather Bits by Bytes by Doubleword
(define_insn "p8v_vgbbd"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
	(unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
		      UNSPEC_VGBBD))]
  "TARGET_P8_VECTOR"
  "vgbbd %0,%1"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3482
3483
3484;; 128-bit binary integer arithmetic
3485;; We have a special container type (V1TImode) to allow operations using the
3486;; ISA 2.07 128-bit binary support to target the VMX/altivec registers without
3487;; having to worry about the register allocator deciding GPRs are better.
3488
;; 128-bit add modulo 2**128 (vadduqm), expressed as a plain V1TI plus so
;; the optimizers can see it is an addition.  V1TI keeps the value in the
;; VMX registers (see the comment block above).
(define_insn "altivec_vadduqm"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(plus:V1TI (match_operand:V1TI 1 "register_operand" "v")
		   (match_operand:V1TI 2 "register_operand" "v")))]
  "TARGET_VADDUQM"
  "vadduqm %0,%1,%2"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3497
;; vaddcuq: write the carry out of the 128-bit unsigned add of operands
;; 1 and 2.  Kept as an unspec; the carry computation has no direct RTL
;; representation.
(define_insn "altivec_vaddcuq"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")]
		     UNSPEC_VADDCUQ))]
  "TARGET_VADDUQM"
  "vaddcuq %0,%1,%2"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3507
;; 128-bit subtract modulo 2**128 (vsubuqm), expressed as a plain V1TI
;; minus, mirroring altivec_vadduqm above.
(define_insn "altivec_vsubuqm"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(minus:V1TI (match_operand:V1TI 1 "register_operand" "v")
		    (match_operand:V1TI 2 "register_operand" "v")))]
  "TARGET_VADDUQM"
  "vsubuqm %0,%1,%2"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3516
;; vsubcuq: write the borrow/carry out of the 128-bit subtract of operand
;; 2 from operand 1; unspec for the same reason as vaddcuq above.
(define_insn "altivec_vsubcuq"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")]
		     UNSPEC_VSUBCUQ))]
  "TARGET_VADDUQM"
  "vsubcuq %0,%1,%2"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3526
;; vaddeuqm: extended 128-bit add — operands 1 + 2 plus a carry-in taken
;; from operand 3.  Three-input carry chains have no RTL operator, so an
;; unspec is used.
(define_insn "altivec_vaddeuqm"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")
		      (match_operand:V1TI 3 "register_operand" "v")]
		     UNSPEC_VADDEUQM))]
  "TARGET_VADDUQM"
  "vaddeuqm %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3537
;; vaddecuq: carry out of the extended 128-bit add (operands 1, 2 and
;; carry-in operand 3); companion to vaddeuqm above.
(define_insn "altivec_vaddecuq"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")
		      (match_operand:V1TI 3 "register_operand" "v")]
		     UNSPEC_VADDECUQ))]
  "TARGET_VADDUQM"
  "vaddecuq %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3548
;; vsubeuqm: extended 128-bit subtract — operand 1 minus operand 2 with a
;; carry/borrow-in from operand 3; unspec, matching vaddeuqm above.
(define_insn "altivec_vsubeuqm"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")
		      (match_operand:V1TI 3 "register_operand" "v")]
		   UNSPEC_VSUBEUQM))]
  "TARGET_VADDUQM"
  "vsubeuqm %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3559
;; vsubecuq: carry out of the extended 128-bit subtract (operands 1, 2 and
;; carry-in operand 3); companion to vsubeuqm above.
(define_insn "altivec_vsubecuq"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")
		      (match_operand:V1TI 3 "register_operand" "v")]
		     UNSPEC_VSUBECUQ))]
  "TARGET_VADDUQM"
  "vsubecuq %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3570
;; We use V2DI as the output type to simplify converting the permute
;; bits into an integer
;; vbpermq: gather bits from V16QI operand 1 under the bit-index control
;; vector in operand 2 (power8).
(define_insn "altivec_vbpermq"
  [(set (match_operand:V2DI 0 "register_operand" "=v")
	(unspec:V2DI [(match_operand:V16QI 1 "register_operand" "v")
		      (match_operand:V16QI 2 "register_operand" "v")]
		     UNSPEC_VBPERMQ))]
  "TARGET_P8_VECTOR"
  "vbpermq %0,%1,%2"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3582
;; Decimal Integer operations
;; Iterator so one pattern serves both bcdadd and bcdsub.
(define_int_iterator UNSPEC_BCD_ADD_SUB [UNSPEC_BCDADD UNSPEC_BCDSUB])

;; Maps each BCD unspec to the "add"/"sub" fragment of its mnemonic and
;; pattern name.
(define_int_attr bcd_add_sub [(UNSPEC_BCDADD "add")
			      (UNSPEC_BCDSUB "sub")])

;; Comparison codes exposed by the bcd<bcd_add_sub>_<code> expander below;
;; unordered detects BCD NaNs/overflow (see the comment before
;; *bcd<bcd_add_sub>_test).
(define_code_iterator BCD_TEST [eq lt gt unordered])
3590
;; BCD add/subtract of 128-bit packed decimal values.  Operand 3 is the PS
;; (preferred sign) bit and must be a 0/1 constant.  The instruction is the
;; record ("." ) form, so CR6 (here CCFP reg 74) is clobbered.
;;
;; This is a define_insn, so it survives to register allocation and needs
;; real constraints: the original had empty constraint strings, unlike the
;; sibling *bcd<bcd_add_sub>_test patterns below which use "v"/"v"/"i".
;; Add the matching constraints so the allocator is forced to pick Altivec
;; registers and a constant for operand 3.
(define_insn "bcd<bcd_add_sub>"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")
		      (match_operand:QI 3 "const_0_to_1_operand" "i")]
		     UNSPEC_BCD_ADD_SUB))
   (clobber (reg:CCFP 74))]
  "TARGET_P8_VECTOR"
  "bcd<bcd_add_sub>. %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3602
;; Use a floating point type (V2DFmode) for the compare to set CR6 so that we
;; can use the unordered test for BCD nans and add/subtracts that overflow.  An
;; UNORDERED test on an integer type (like V1TImode) is not defined.  The type
;; probably should be one that can go in the VMX (Altivec) registers, so we
;; can't use DDmode or DFmode.
;; Variant that only wants the CR6 result: the V1TI value the instruction
;; also produces is discarded via a match_scratch clobber.
(define_insn "*bcd<bcd_add_sub>_test"
  [(set (reg:CCFP 74)
	(compare:CCFP
	 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "v")
		       (match_operand:V1TI 2 "register_operand" "v")
		       (match_operand:QI 3 "const_0_to_1_operand" "i")]
		      UNSPEC_BCD_ADD_SUB)
	 (match_operand:V2DF 4 "zero_constant" "j")))
   (clobber (match_scratch:V1TI 0 "=v"))]
  "TARGET_P8_VECTOR"
  "bcd<bcd_add_sub>. %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3621
;; Variant that uses BOTH results of the record-form instruction: the V1TI
;; value in operand 0 and the CR6 condition.  This is the insn the
;; peephole2 at the end of this section produces when it fuses a compute
;; insn with a following test insn.
(define_insn "*bcd<bcd_add_sub>_test2"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")
		      (match_operand:QI 3 "const_0_to_1_operand" "i")]
		     UNSPEC_BCD_ADD_SUB))
   (set (reg:CCFP 74)
	(compare:CCFP
	 (unspec:V2DF [(match_dup 1)
		       (match_dup 2)
		       (match_dup 3)]
		      UNSPEC_BCD_ADD_SUB)
	 (match_operand:V2DF 4 "zero_constant" "j")))]
  "TARGET_P8_VECTOR"
  "bcd<bcd_add_sub>. %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3639
;; Deliver A Random Number (darn) with L=0: 32-bit conditioned random
;; value.  Modelled as an unspec of (const_int 0) since the result depends
;; on nothing the compiler can see; power9 only.
(define_insn "darn_32"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (unspec:SI [(const_int 0)] UNSPEC_DARN_32))]
  "TARGET_P9_MISC"
  "darn %0,0"
  [(set_attr "type" "integer")])
3646
;; darn with L=2: 64-bit raw (unconditioned) random value; requires a
;; 64-bit target for the DImode result.
(define_insn "darn_raw"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (unspec:DI [(const_int 0)] UNSPEC_DARN_RAW))]
  "TARGET_P9_MISC && TARGET_64BIT"
  "darn %0,2"
  [(set_attr "type" "integer")])
3653
;; darn with L=1: 64-bit conditioned random value; requires a 64-bit
;; target for the DImode result.
(define_insn "darn"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (unspec:DI [(const_int 0)] UNSPEC_DARN))]
  "TARGET_P9_MISC && TARGET_64BIT"
  "darn %0,1"
  [(set_attr "type" "integer")])
3660
;; Expander for the builtin predicates: perform the BCD add/subtract for
;; its CR6 side effect only, then materialize the BCD_TEST comparison
;; (eq/lt/gt/unordered) of CR6 into the SImode result in operand 0.
;; Operand 4 (the V2DF zero the compare is written against) is filled in
;; by the preparation statement below.
(define_expand "bcd<bcd_add_sub>_<code>"
  [(parallel [(set (reg:CCFP 74)
		   (compare:CCFP
		    (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "")
				  (match_operand:V1TI 2 "register_operand" "")
				  (match_operand:QI 3 "const_0_to_1_operand" "")]
				 UNSPEC_BCD_ADD_SUB)
		    (match_dup 4)))
	      (clobber (match_scratch:V1TI 5 ""))])
   (set (match_operand:SI 0 "register_operand" "")
	(BCD_TEST:SI (reg:CCFP 74)
		     (const_int 0)))]
  "TARGET_P8_VECTOR"
{
  operands[4] = CONST0_RTX (V2DFmode);
})
3677
3678;; Peephole2 pattern to combine a bcdadd/bcdsub that calculates the value and
3679;; the bcdadd/bcdsub that tests the value.  The combiner won't work since
3680;; CR6 is a hard coded register.  Unfortunately, all of the Altivec predicate
3681;; support is hard coded to use the fixed register CR6 instead of creating
3682;; a register class for CR6.
3683
;; Match a bcdadd/bcdsub that keeps its V1TI value (clobbering CR6)
;; immediately followed by one that only sets CR6 on the same inputs, and
;; replace the pair with a single parallel setting both the value and CR6
;; — i.e. the *bcd<bcd_add_sub>_test2 insn above.  The scratch register
;; of the second insn (operand 5) is dropped in the replacement.
(define_peephole2
  [(parallel [(set (match_operand:V1TI 0 "register_operand" "")
		   (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "")
				 (match_operand:V1TI 2 "register_operand" "")
				 (match_operand:QI 3 "const_0_to_1_operand" "")]
				UNSPEC_BCD_ADD_SUB))
	      (clobber (reg:CCFP 74))])
   (parallel [(set (reg:CCFP 74)
		   (compare:CCFP
		    (unspec:V2DF [(match_dup 1)
				  (match_dup 2)
				  (match_dup 3)]
				 UNSPEC_BCD_ADD_SUB)
		    (match_operand:V2DF 4 "zero_constant" "")))
	      (clobber (match_operand:V1TI 5 "register_operand" ""))])]
  "TARGET_P8_VECTOR"
  [(parallel [(set (match_dup 0)
		   (unspec:V1TI [(match_dup 1)
				 (match_dup 2)
				 (match_dup 3)]
				UNSPEC_BCD_ADD_SUB))
	      (set (reg:CCFP 74)
		   (compare:CCFP
		    (unspec:V2DF [(match_dup 1)
				  (match_dup 2)
				  (match_dup 3)]
				 UNSPEC_BCD_ADD_SUB)
		    (match_dup 4)))])])
3712