comparison gcc/config/rs6000/altivec.md @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
comparison
equal deleted inserted replaced
68:561a7518be6b 111:04ced10e8804
1 ;; AltiVec patterns. 1 ;; AltiVec patterns.
2 ;; Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 2 ;; Copyright (C) 2002-2017 Free Software Foundation, Inc.
3 ;; Free Software Foundation, Inc.
4 ;; Contributed by Aldy Hernandez (aldy@quesejoda.com) 3 ;; Contributed by Aldy Hernandez (aldy@quesejoda.com)
5 4
6 ;; This file is part of GCC. 5 ;; This file is part of GCC.
7 6
8 ;; GCC is free software; you can redistribute it and/or modify it 7 ;; GCC is free software; you can redistribute it and/or modify it
17 16
18 ;; You should have received a copy of the GNU General Public License 17 ;; You should have received a copy of the GNU General Public License
19 ;; along with GCC; see the file COPYING3. If not see 18 ;; along with GCC; see the file COPYING3. If not see
20 ;; <http://www.gnu.org/licenses/>. 19 ;; <http://www.gnu.org/licenses/>.
21 20
22 (define_constants 21 (define_c_enum "unspec"
23 ;; 51-62 deleted 22 [UNSPEC_VCMPBFP
24 [(UNSPEC_VCMPBFP 64) 23 UNSPEC_VMSUMU
25 (UNSPEC_VMSUMU 65) 24 UNSPEC_VMSUMM
26 (UNSPEC_VMSUMM 66) 25 UNSPEC_VMSUMSHM
27 (UNSPEC_VMSUMSHM 68) 26 UNSPEC_VMSUMUHS
28 (UNSPEC_VMSUMUHS 69) 27 UNSPEC_VMSUMSHS
29 (UNSPEC_VMSUMSHS 70) 28 UNSPEC_VMHADDSHS
30 (UNSPEC_VMHADDSHS 71) 29 UNSPEC_VMHRADDSHS
31 (UNSPEC_VMHRADDSHS 72) 30 UNSPEC_VADDCUW
32 (UNSPEC_VMLADDUHM 73) 31 UNSPEC_VADDU
33 (UNSPEC_VADDCUW 75) 32 UNSPEC_VADDS
34 (UNSPEC_VADDU 76) 33 UNSPEC_VAVGU
35 (UNSPEC_VADDS 77) 34 UNSPEC_VAVGS
36 (UNSPEC_VAVGU 80) 35 UNSPEC_VMULEUB
37 (UNSPEC_VAVGS 81) 36 UNSPEC_VMULESB
38 (UNSPEC_VMULEUB 83) 37 UNSPEC_VMULEUH
39 (UNSPEC_VMULESB 84) 38 UNSPEC_VMULESH
40 (UNSPEC_VMULEUH 85) 39 UNSPEC_VMULEUW
41 (UNSPEC_VMULESH 86) 40 UNSPEC_VMULESW
42 (UNSPEC_VMULOUB 87) 41 UNSPEC_VMULOUB
43 (UNSPEC_VMULOSB 88) 42 UNSPEC_VMULOSB
44 (UNSPEC_VMULOUH 89) 43 UNSPEC_VMULOUH
45 (UNSPEC_VMULOSH 90) 44 UNSPEC_VMULOSH
46 (UNSPEC_VPKUHUM 93) 45 UNSPEC_VMULOUW
47 (UNSPEC_VPKUWUM 94) 46 UNSPEC_VMULOSW
48 (UNSPEC_VPKPX 95) 47 UNSPEC_VPKPX
49 (UNSPEC_VPKSHSS 97) 48 UNSPEC_VPACK_SIGN_SIGN_SAT
50 (UNSPEC_VPKSWSS 99) 49 UNSPEC_VPACK_SIGN_UNS_SAT
51 (UNSPEC_VPKUHUS 100) 50 UNSPEC_VPACK_UNS_UNS_SAT
52 (UNSPEC_VPKSHUS 101) 51 UNSPEC_VPACK_UNS_UNS_MOD
53 (UNSPEC_VPKUWUS 102) 52 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT
54 (UNSPEC_VPKSWUS 103) 53 UNSPEC_VREVEV
55 ;; 104 deleted 54 UNSPEC_VSLV4SI
56 (UNSPEC_VSLV4SI 110) 55 UNSPEC_VSLO
57 (UNSPEC_VSLO 111) 56 UNSPEC_VSR
58 (UNSPEC_VSR 118) 57 UNSPEC_VSRO
59 (UNSPEC_VSRO 119) 58 UNSPEC_VSUBCUW
60 (UNSPEC_VSUBCUW 124) 59 UNSPEC_VSUBU
61 (UNSPEC_VSUBU 125) 60 UNSPEC_VSUBS
62 (UNSPEC_VSUBS 126) 61 UNSPEC_VSUM4UBS
63 (UNSPEC_VSUM4UBS 131) 62 UNSPEC_VSUM4S
64 (UNSPEC_VSUM4S 132) 63 UNSPEC_VSUM2SWS
65 (UNSPEC_VSUM2SWS 134) 64 UNSPEC_VSUMSWS
66 (UNSPEC_VSUMSWS 135) 65 UNSPEC_VPERM
67 (UNSPEC_VPERM 144) 66 UNSPEC_VPERMR
68 (UNSPEC_VPERM_UNS 145) 67 UNSPEC_VPERM_UNS
69 ;; 148 deleted 68 UNSPEC_VRFIN
70 (UNSPEC_VRFIN 149) 69 UNSPEC_VCFUX
71 ;; 150 deleted 70 UNSPEC_VCFSX
72 (UNSPEC_VCFUX 151) 71 UNSPEC_VCTUXS
73 (UNSPEC_VCFSX 152) 72 UNSPEC_VCTSXS
74 (UNSPEC_VCTUXS 153) 73 UNSPEC_VLOGEFP
75 (UNSPEC_VCTSXS 154) 74 UNSPEC_VEXPTEFP
76 (UNSPEC_VLOGEFP 155) 75 UNSPEC_VSLDOI
77 (UNSPEC_VEXPTEFP 156) 76 UNSPEC_VUNPACK_HI_SIGN
78 ;; 157-162 deleted 77 UNSPEC_VUNPACK_LO_SIGN
79 (UNSPEC_VLSDOI 163) 78 UNSPEC_VUNPACK_HI_SIGN_DIRECT
80 (UNSPEC_VUPKHSB 167) 79 UNSPEC_VUNPACK_LO_SIGN_DIRECT
81 (UNSPEC_VUPKHPX 168) 80 UNSPEC_VUPKHPX
82 (UNSPEC_VUPKHSH 169) 81 UNSPEC_VUPKLPX
83 (UNSPEC_VUPKLSB 170) 82 UNSPEC_CONVERT_4F32_8I16
84 (UNSPEC_VUPKLPX 171) 83 UNSPEC_DARN
85 (UNSPEC_VUPKLSH 172) 84 UNSPEC_DARN_32
86 ;; 173 deleted 85 UNSPEC_DARN_RAW
87 (UNSPEC_DST 190) 86 UNSPEC_DST
88 (UNSPEC_DSTT 191) 87 UNSPEC_DSTT
89 (UNSPEC_DSTST 192) 88 UNSPEC_DSTST
90 (UNSPEC_DSTSTT 193) 89 UNSPEC_DSTSTT
91 (UNSPEC_LVSL 194) 90 UNSPEC_LVSL
92 (UNSPEC_LVSR 195) 91 UNSPEC_LVSR
93 (UNSPEC_LVE 196) 92 UNSPEC_LVE
94 (UNSPEC_STVX 201) 93 UNSPEC_STVX
95 (UNSPEC_STVXL 202) 94 UNSPEC_STVXL
96 (UNSPEC_STVE 203) 95 UNSPEC_STVE
97 (UNSPEC_SET_VSCR 213) 96 UNSPEC_SET_VSCR
98 (UNSPEC_GET_VRSAVE 214) 97 UNSPEC_GET_VRSAVE
99 (UNSPEC_LVX 215) 98 UNSPEC_LVX
100 (UNSPEC_REDUC_PLUS 217) 99 UNSPEC_REDUC_PLUS
101 (UNSPEC_VECSH 219) 100 UNSPEC_VECSH
102 (UNSPEC_EXTEVEN_V4SI 220) 101 UNSPEC_EXTEVEN_V4SI
103 (UNSPEC_EXTEVEN_V8HI 221) 102 UNSPEC_EXTEVEN_V8HI
104 (UNSPEC_EXTEVEN_V16QI 222) 103 UNSPEC_EXTEVEN_V16QI
105 (UNSPEC_EXTEVEN_V4SF 223) 104 UNSPEC_EXTEVEN_V4SF
106 (UNSPEC_EXTODD_V4SI 224) 105 UNSPEC_EXTODD_V4SI
107 (UNSPEC_EXTODD_V8HI 225) 106 UNSPEC_EXTODD_V8HI
108 (UNSPEC_EXTODD_V16QI 226) 107 UNSPEC_EXTODD_V16QI
109 (UNSPEC_EXTODD_V4SF 227) 108 UNSPEC_EXTODD_V4SF
110 (UNSPEC_INTERHI_V4SI 228) 109 UNSPEC_INTERHI_V4SI
111 (UNSPEC_INTERHI_V8HI 229) 110 UNSPEC_INTERHI_V8HI
112 (UNSPEC_INTERHI_V16QI 230) 111 UNSPEC_INTERHI_V16QI
113 ;; delete 231 112 UNSPEC_INTERLO_V4SI
114 (UNSPEC_INTERLO_V4SI 232) 113 UNSPEC_INTERLO_V8HI
115 (UNSPEC_INTERLO_V8HI 233) 114 UNSPEC_INTERLO_V16QI
116 (UNSPEC_INTERLO_V16QI 234) 115 UNSPEC_LVLX
117 ;; delete 235 116 UNSPEC_LVLXL
118 (UNSPEC_LVLX 236) 117 UNSPEC_LVRX
119 (UNSPEC_LVLXL 237) 118 UNSPEC_LVRXL
120 (UNSPEC_LVRX 238) 119 UNSPEC_STVLX
121 (UNSPEC_LVRXL 239) 120 UNSPEC_STVLXL
122 (UNSPEC_STVLX 240) 121 UNSPEC_STVRX
123 (UNSPEC_STVLXL 241) 122 UNSPEC_STVRXL
124 (UNSPEC_STVRX 242) 123 UNSPEC_VADU
125 (UNSPEC_STVRXL 243) 124 UNSPEC_VSLV
126 (UNSPEC_VMULWHUB 308) 125 UNSPEC_VSRV
127 (UNSPEC_VMULWLUB 309) 126 UNSPEC_VMULWHUB
128 (UNSPEC_VMULWHSB 310) 127 UNSPEC_VMULWLUB
129 (UNSPEC_VMULWLSB 311) 128 UNSPEC_VMULWHSB
130 (UNSPEC_VMULWHUH 312) 129 UNSPEC_VMULWLSB
131 (UNSPEC_VMULWLUH 313) 130 UNSPEC_VMULWHUH
132 (UNSPEC_VMULWHSH 314) 131 UNSPEC_VMULWLUH
133 (UNSPEC_VMULWLSH 315) 132 UNSPEC_VMULWHSH
134 (UNSPEC_VUPKHUB 316) 133 UNSPEC_VMULWLSH
135 (UNSPEC_VUPKHUH 317) 134 UNSPEC_VUPKHUB
136 (UNSPEC_VUPKLUB 318) 135 UNSPEC_VUPKHUH
137 (UNSPEC_VUPKLUH 319) 136 UNSPEC_VUPKLUB
138 (UNSPEC_VPERMSI 320) 137 UNSPEC_VUPKLUH
139 (UNSPEC_VPERMHI 321) 138 UNSPEC_VPERMSI
140 (UNSPEC_INTERHI 322) 139 UNSPEC_VPERMHI
141 (UNSPEC_INTERLO 323) 140 UNSPEC_INTERHI
142 (UNSPEC_VUPKHS_V4SF 324) 141 UNSPEC_INTERLO
143 (UNSPEC_VUPKLS_V4SF 325) 142 UNSPEC_VUPKHS_V4SF
144 (UNSPEC_VUPKHU_V4SF 326) 143 UNSPEC_VUPKLS_V4SF
145 (UNSPEC_VUPKLU_V4SF 327) 144 UNSPEC_VUPKHU_V4SF
145 UNSPEC_VUPKLU_V4SF
146 UNSPEC_VGBBD
147 UNSPEC_VMRGH_DIRECT
148 UNSPEC_VMRGL_DIRECT
149 UNSPEC_VSPLT_DIRECT
150 UNSPEC_VMRGEW_DIRECT
151 UNSPEC_VMRGOW_DIRECT
152 UNSPEC_VSUMSWS_DIRECT
153 UNSPEC_VADDCUQ
154 UNSPEC_VADDEUQM
155 UNSPEC_VADDECUQ
156 UNSPEC_VSUBCUQ
157 UNSPEC_VSUBEUQM
158 UNSPEC_VSUBECUQ
159 UNSPEC_VBPERMQ
160 UNSPEC_VBPERMD
161 UNSPEC_BCDADD
162 UNSPEC_BCDSUB
163 UNSPEC_BCD_OVERFLOW
164 UNSPEC_CMPRB
165 UNSPEC_CMPRB2
166 UNSPEC_CMPEQB
167 UNSPEC_VRLMI
168 UNSPEC_VRLNM
146 ]) 169 ])
147 170
148 (define_constants 171 (define_c_enum "unspecv"
149 [(UNSPECV_SET_VRSAVE 30) 172 [UNSPECV_SET_VRSAVE
150 (UNSPECV_MTVSCR 186) 173 UNSPECV_MTVSCR
151 (UNSPECV_MFVSCR 187) 174 UNSPECV_MFVSCR
152 (UNSPECV_DSSALL 188) 175 UNSPECV_DSSALL
153 (UNSPECV_DSS 189) 176 UNSPECV_DSS
154 ]) 177 ])
155 178
156 ;; Vec int modes 179 ;; Like VI, defined in vector.md, but add ISA 2.07 integer vector ops
157 (define_mode_iterator VI [V4SI V8HI V16QI]) 180 (define_mode_iterator VI2 [V4SI V8HI V16QI V2DI])
158 ;; Short vec in modes 181 ;; Short vec int modes
159 (define_mode_iterator VIshort [V8HI V16QI]) 182 (define_mode_iterator VIshort [V8HI V16QI])
183 ;; Longer vec int modes for rotate/mask ops
184 (define_mode_iterator VIlong [V2DI V4SI])
160 ;; Vec float modes 185 ;; Vec float modes
161 (define_mode_iterator VF [V4SF]) 186 (define_mode_iterator VF [V4SF])
162 ;; Vec modes, pity mode iterators are not composable 187 ;; Vec modes, pity mode iterators are not composable
163 (define_mode_iterator V [V4SI V8HI V16QI V4SF]) 188 (define_mode_iterator V [V4SI V8HI V16QI V4SF])
164 ;; Vec modes for move/logical/permute ops, include vector types for move not 189 ;; Vec modes for move/logical/permute ops, include vector types for move not
165 ;; otherwise handled by altivec (v2df, v2di, ti) 190 ;; otherwise handled by altivec (v2df, v2di, ti)
166 (define_mode_iterator VM [V4SI V8HI V16QI V4SF V2DF V2DI TI]) 191 (define_mode_iterator VM [V4SI
192 V8HI
193 V16QI
194 V4SF
195 V2DF
196 V2DI
197 V1TI
198 TI
199 (KF "FLOAT128_VECTOR_P (KFmode)")
200 (TF "FLOAT128_VECTOR_P (TFmode)")])
167 201
168 ;; Like VM, except don't do TImode 202 ;; Like VM, except don't do TImode
169 (define_mode_iterator VM2 [V4SI V8HI V16QI V4SF V2DF V2DI]) 203 (define_mode_iterator VM2 [V4SI
170 204 V8HI
171 (define_mode_attr VI_char [(V4SI "w") (V8HI "h") (V16QI "b")]) 205 V16QI
172 (define_mode_attr VI_scalar [(V4SI "SI") (V8HI "HI") (V16QI "QI")]) 206 V4SF
207 V2DF
208 V2DI
209 V1TI
210 (KF "FLOAT128_VECTOR_P (KFmode)")
211 (TF "FLOAT128_VECTOR_P (TFmode)")])
212
213 ;; Map the Vector convert single precision to double precision for integer
214 ;; versus floating point
215 (define_mode_attr VS_sxwsp [(V4SI "sxw") (V4SF "sp")])
216
217 ;; Specific iterator for parity which does not have a byte/half-word form, but
218 ;; does have a quad word form
219 (define_mode_iterator VParity [V4SI
220 V2DI
221 V1TI
222 TI])
223
224 (define_mode_attr VI_char [(V2DI "d") (V4SI "w") (V8HI "h") (V16QI "b")])
225 (define_mode_attr VI_scalar [(V2DI "DI") (V4SI "SI") (V8HI "HI") (V16QI "QI")])
226 (define_mode_attr VI_unit [(V16QI "VECTOR_UNIT_ALTIVEC_P (V16QImode)")
227 (V8HI "VECTOR_UNIT_ALTIVEC_P (V8HImode)")
228 (V4SI "VECTOR_UNIT_ALTIVEC_P (V4SImode)")
229 (V2DI "VECTOR_UNIT_P8_VECTOR_P (V2DImode)")
230 (V1TI "VECTOR_UNIT_ALTIVEC_P (V1TImode)")])
231
232 ;; Vector pack/unpack
233 (define_mode_iterator VP [V2DI V4SI V8HI])
234 (define_mode_attr VP_small [(V2DI "V4SI") (V4SI "V8HI") (V8HI "V16QI")])
235 (define_mode_attr VP_small_lc [(V2DI "v4si") (V4SI "v8hi") (V8HI "v16qi")])
236 (define_mode_attr VU_char [(V2DI "w") (V4SI "h") (V8HI "b")])
237
238 ;; Vector negate
239 (define_mode_iterator VNEG [V4SI V2DI])
173 240
174 ;; Vector move instructions. 241 ;; Vector move instructions.
175 (define_insn "*altivec_mov<mode>" 242 (define_insn "*altivec_mov<mode>"
176 [(set (match_operand:VM2 0 "nonimmediate_operand" "=Z,v,v,*o,*r,*r,v,v") 243 [(set (match_operand:VM2 0 "nonimmediate_operand" "=Z,v,v,?Y,?*r,?*r,v,v,?*r")
177 (match_operand:VM2 1 "input_operand" "v,Z,v,r,o,r,j,W"))] 244 (match_operand:VM2 1 "input_operand" "v,Z,v,*r,Y,*r,j,W,W"))]
178 "VECTOR_MEM_ALTIVEC_P (<MODE>mode) 245 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)
179 && (register_operand (operands[0], <MODE>mode) 246 && (register_operand (operands[0], <MODE>mode)
180 || register_operand (operands[1], <MODE>mode))" 247 || register_operand (operands[1], <MODE>mode))"
248 {
249 switch (which_alternative)
250 {
251 case 0: return "stvx %1,%y0";
252 case 1: return "lvx %0,%y1";
253 case 2: return "vor %0,%1,%1";
254 case 3: return "#";
255 case 4: return "#";
256 case 5: return "#";
257 case 6: return "vxor %0,%0,%0";
258 case 7: return output_vec_const_move (operands);
259 case 8: return "#";
260 default: gcc_unreachable ();
261 }
262 }
263 [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*,*")
264 (set_attr "length" "4,4,4,20,20,20,4,8,32")])
265
266 ;; Unlike other altivec moves, allow the GPRs, since a normal use of TImode
267 ;; is for unions. However for plain data movement, slightly favor the vector
268 ;; loads
269 (define_insn "*altivec_movti"
270 [(set (match_operand:TI 0 "nonimmediate_operand" "=Z,v,v,?Y,?r,?r,v,v")
271 (match_operand:TI 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
272 "VECTOR_MEM_ALTIVEC_P (TImode)
273 && (register_operand (operands[0], TImode)
274 || register_operand (operands[1], TImode))"
181 { 275 {
182 switch (which_alternative) 276 switch (which_alternative)
183 { 277 {
184 case 0: return "stvx %1,%y0"; 278 case 0: return "stvx %1,%y0";
185 case 1: return "lvx %0,%y1"; 279 case 1: return "lvx %0,%y1";
190 case 6: return "vxor %0,%0,%0"; 284 case 6: return "vxor %0,%0,%0";
191 case 7: return output_vec_const_move (operands); 285 case 7: return output_vec_const_move (operands);
192 default: gcc_unreachable (); 286 default: gcc_unreachable ();
193 } 287 }
194 } 288 }
195 [(set_attr "type" "vecstore,vecload,vecsimple,store,load,*,vecsimple,*")]) 289 [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*")])
196
197 ;; Unlike other altivec moves, allow the GPRs, since a normal use of TImode
198 ;; is for unions. However for plain data movement, slightly favor the vector
199 ;; loads
200 (define_insn "*altivec_movti"
201 [(set (match_operand:TI 0 "nonimmediate_operand" "=Z,v,v,?o,?r,?r,v,v")
202 (match_operand:TI 1 "input_operand" "v,Z,v,r,o,r,j,W"))]
203 "VECTOR_MEM_ALTIVEC_P (TImode)
204 && (register_operand (operands[0], TImode)
205 || register_operand (operands[1], TImode))"
206 {
207 switch (which_alternative)
208 {
209 case 0: return "stvx %1,%y0";
210 case 1: return "lvx %0,%y1";
211 case 2: return "vor %0,%1,%1";
212 case 3: return "#";
213 case 4: return "#";
214 case 5: return "#";
215 case 6: return "vxor %0,%0,%0";
216 case 7: return output_vec_const_move (operands);
217 default: gcc_unreachable ();
218 }
219 }
220 [(set_attr "type" "vecstore,vecload,vecsimple,store,load,*,vecsimple,*")])
221 290
222 ;; Load up a vector with the most significant bit set by loading up -1 and 291 ;; Load up a vector with the most significant bit set by loading up -1 and
223 ;; doing a shift left 292 ;; doing a shift left
224 (define_split 293 (define_split
225 [(set (match_operand:VM 0 "altivec_register_operand" "") 294 [(set (match_operand:VM 0 "altivec_register_operand" "")
226 (match_operand:VM 1 "easy_vector_constant_msb" ""))] 295 (match_operand:VM 1 "easy_vector_constant_msb" ""))]
227 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed" 296 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
228 [(const_int 0)] 297 [(const_int 0)]
229 { 298 {
230 rtx dest = operands[0]; 299 rtx dest = operands[0];
231 enum machine_mode mode = GET_MODE (operands[0]); 300 machine_mode mode = GET_MODE (operands[0]);
232 rtvec v; 301 rtvec v;
233 int i, num_elements; 302 int i, num_elements;
234 303
235 if (mode == V4SFmode) 304 if (mode == V4SFmode)
236 { 305 {
241 num_elements = GET_MODE_NUNITS (mode); 310 num_elements = GET_MODE_NUNITS (mode);
242 v = rtvec_alloc (num_elements); 311 v = rtvec_alloc (num_elements);
243 for (i = 0; i < num_elements; i++) 312 for (i = 0; i < num_elements; i++)
244 RTVEC_ELT (v, i) = constm1_rtx; 313 RTVEC_ELT (v, i) = constm1_rtx;
245 314
246 emit_insn (gen_vec_initv4si (dest, gen_rtx_PARALLEL (mode, v))); 315 emit_insn (gen_vec_initv4sisi (dest, gen_rtx_PARALLEL (mode, v)));
247 emit_insn (gen_rtx_SET (VOIDmode, dest, gen_rtx_ASHIFT (mode, dest, dest))); 316 emit_insn (gen_rtx_SET (dest, gen_rtx_ASHIFT (mode, dest, dest)));
248 DONE; 317 DONE;
249 }) 318 })
250 319
251 (define_split 320 (define_split
252 [(set (match_operand:VM 0 "altivec_register_operand" "") 321 [(set (match_operand:VM 0 "altivec_register_operand" "")
255 [(set (match_dup 0) (match_dup 3)) 324 [(set (match_dup 0) (match_dup 3))
256 (set (match_dup 0) (match_dup 4))] 325 (set (match_dup 0) (match_dup 4))]
257 { 326 {
258 rtx dup = gen_easy_altivec_constant (operands[1]); 327 rtx dup = gen_easy_altivec_constant (operands[1]);
259 rtx const_vec; 328 rtx const_vec;
260 enum machine_mode op_mode = <MODE>mode; 329 machine_mode op_mode = <MODE>mode;
261 330
262 /* Divide the operand of the resulting VEC_DUPLICATE, and use 331 /* Divide the operand of the resulting VEC_DUPLICATE, and use
263 simplify_rtx to make a CONST_VECTOR. */ 332 simplify_rtx to make a CONST_VECTOR. */
264 XEXP (dup, 0) = simplify_const_binary_operation (ASHIFTRT, QImode, 333 XEXP (dup, 0) = simplify_const_binary_operation (ASHIFTRT, QImode,
265 XEXP (dup, 0), const1_rtx); 334 XEXP (dup, 0), const1_rtx);
275 else 344 else
276 operands[3] = gen_lowpart (op_mode, const_vec); 345 operands[3] = gen_lowpart (op_mode, const_vec);
277 operands[4] = gen_rtx_PLUS (op_mode, operands[0], operands[0]); 346 operands[4] = gen_rtx_PLUS (op_mode, operands[0], operands[0]);
278 }) 347 })
279 348
349 (define_split
350 [(set (match_operand:VM 0 "altivec_register_operand" "")
351 (match_operand:VM 1 "easy_vector_constant_vsldoi" ""))]
352 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && can_create_pseudo_p ()"
353 [(set (match_dup 2) (match_dup 3))
354 (set (match_dup 4) (match_dup 5))
355 (set (match_dup 0)
356 (unspec:VM [(match_dup 2)
357 (match_dup 4)
358 (match_dup 6)]
359 UNSPEC_VSLDOI))]
360 {
361 rtx op1 = operands[1];
362 int elt = (BYTES_BIG_ENDIAN) ? 0 : GET_MODE_NUNITS (<MODE>mode) - 1;
363 HOST_WIDE_INT val = const_vector_elt_as_int (op1, elt);
364 rtx rtx_val = GEN_INT (val);
365 int shift = vspltis_shifted (op1);
366 int nunits = GET_MODE_NUNITS (<MODE>mode);
367 int i;
368
369 gcc_assert (shift != 0);
370 operands[2] = gen_reg_rtx (<MODE>mode);
371 operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, rtvec_alloc (nunits));
372 operands[4] = gen_reg_rtx (<MODE>mode);
373
374 if (shift < 0)
375 {
376 operands[5] = CONSTM1_RTX (<MODE>mode);
377 operands[6] = GEN_INT (-shift);
378 }
379 else
380 {
381 operands[5] = CONST0_RTX (<MODE>mode);
382 operands[6] = GEN_INT (shift);
383 }
384
385 /* Populate the constant vectors. */
386 for (i = 0; i < nunits; i++)
387 XVECEXP (operands[3], 0, i) = rtx_val;
388 })
389
280 (define_insn "get_vrsave_internal" 390 (define_insn "get_vrsave_internal"
281 [(set (match_operand:SI 0 "register_operand" "=r") 391 [(set (match_operand:SI 0 "register_operand" "=r")
282 (unspec:SI [(reg:SI 109)] UNSPEC_GET_VRSAVE))] 392 (unspec:SI [(reg:SI VRSAVE_REGNO)] UNSPEC_GET_VRSAVE))]
283 "TARGET_ALTIVEC" 393 "TARGET_ALTIVEC"
284 { 394 {
285 if (TARGET_MACHO) 395 if (TARGET_MACHO)
286 return "mfspr %0,256"; 396 return "mfspr %0,256";
287 else 397 else
289 } 399 }
290 [(set_attr "type" "*")]) 400 [(set_attr "type" "*")])
291 401
292 (define_insn "*set_vrsave_internal" 402 (define_insn "*set_vrsave_internal"
293 [(match_parallel 0 "vrsave_operation" 403 [(match_parallel 0 "vrsave_operation"
294 [(set (reg:SI 109) 404 [(set (reg:SI VRSAVE_REGNO)
295 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r") 405 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r")
296 (reg:SI 109)] UNSPECV_SET_VRSAVE))])] 406 (reg:SI VRSAVE_REGNO)] UNSPECV_SET_VRSAVE))])]
297 "TARGET_ALTIVEC" 407 "TARGET_ALTIVEC"
298 { 408 {
299 if (TARGET_MACHO) 409 if (TARGET_MACHO)
300 return "mtspr 256,%1"; 410 return "mtspr 256,%1";
301 else 411 else
303 } 413 }
304 [(set_attr "type" "*")]) 414 [(set_attr "type" "*")])
305 415
306 (define_insn "*save_world" 416 (define_insn "*save_world"
307 [(match_parallel 0 "save_world_operation" 417 [(match_parallel 0 "save_world_operation"
308 [(clobber (reg:SI 65)) 418 [(clobber (reg:SI LR_REGNO))
309 (use (match_operand:SI 1 "call_operand" "s"))])] 419 (use (match_operand:SI 1 "call_operand" "s"))])]
310 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT" 420 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
311 "bl %z1" 421 "bl %z1"
312 [(set_attr "type" "branch") 422 [(set_attr "type" "branch")
313 (set_attr "length" "4")]) 423 (set_attr "length" "4")])
314 424
315 (define_insn "*restore_world" 425 (define_insn "*restore_world"
316 [(match_parallel 0 "restore_world_operation" 426 [(match_parallel 0 "restore_world_operation"
317 [(return) 427 [(return)
318 (use (reg:SI 65)) 428 (use (reg:SI LR_REGNO))
319 (use (match_operand:SI 1 "call_operand" "s")) 429 (use (match_operand:SI 1 "call_operand" "s"))
320 (clobber (match_operand:SI 2 "gpc_reg_operand" "=r"))])] 430 (clobber (match_operand:SI 2 "gpc_reg_operand" "=r"))])]
321 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT" 431 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
322 "b %z1") 432 "b %z1")
323 433
434 ;; The save_vregs and restore_vregs patterns don't use memory_operand
435 ;; because (plus (reg) (const_int)) is not a valid vector address.
436 ;; This way is more compact than describing exactly what happens in
437 ;; the out-of-line functions, ie. loading the constant into r11/r12
438 ;; then using indexed addressing, and requires less editing of rtl
439 ;; to describe the operation to dwarf2out_frame_debug_expr.
440 (define_insn "*save_vregs_<mode>_r11"
441 [(match_parallel 0 "any_parallel_operand"
442 [(clobber (reg:P LR_REGNO))
443 (use (match_operand:P 1 "symbol_ref_operand" "s"))
444 (clobber (reg:P 11))
445 (use (reg:P 0))
446 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
447 (match_operand:P 3 "short_cint_operand" "I")))
448 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
449 "TARGET_ALTIVEC"
450 "bl %1"
451 [(set_attr "type" "branch")
452 (set_attr "length" "4")])
453
454 (define_insn "*save_vregs_<mode>_r12"
455 [(match_parallel 0 "any_parallel_operand"
456 [(clobber (reg:P LR_REGNO))
457 (use (match_operand:P 1 "symbol_ref_operand" "s"))
458 (clobber (reg:P 12))
459 (use (reg:P 0))
460 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
461 (match_operand:P 3 "short_cint_operand" "I")))
462 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
463 "TARGET_ALTIVEC"
464 "bl %1"
465 [(set_attr "type" "branch")
466 (set_attr "length" "4")])
467
468 (define_insn "*restore_vregs_<mode>_r11"
469 [(match_parallel 0 "any_parallel_operand"
470 [(clobber (reg:P LR_REGNO))
471 (use (match_operand:P 1 "symbol_ref_operand" "s"))
472 (clobber (reg:P 11))
473 (use (reg:P 0))
474 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
475 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
476 (match_operand:P 4 "short_cint_operand" "I"))))])]
477 "TARGET_ALTIVEC"
478 "bl %1"
479 [(set_attr "type" "branch")
480 (set_attr "length" "4")])
481
482 (define_insn "*restore_vregs_<mode>_r12"
483 [(match_parallel 0 "any_parallel_operand"
484 [(clobber (reg:P LR_REGNO))
485 (use (match_operand:P 1 "symbol_ref_operand" "s"))
486 (clobber (reg:P 12))
487 (use (reg:P 0))
488 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
489 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
490 (match_operand:P 4 "short_cint_operand" "I"))))])]
491 "TARGET_ALTIVEC"
492 "bl %1"
493 [(set_attr "type" "branch")
494 (set_attr "length" "4")])
495
324 ;; Simple binary operations. 496 ;; Simple binary operations.
325 497
326 ;; add 498 ;; add
327 (define_insn "add<mode>3" 499 (define_insn "add<mode>3"
328 [(set (match_operand:VI 0 "register_operand" "=v") 500 [(set (match_operand:VI2 0 "register_operand" "=v")
329 (plus:VI (match_operand:VI 1 "register_operand" "v") 501 (plus:VI2 (match_operand:VI2 1 "register_operand" "v")
330 (match_operand:VI 2 "register_operand" "v")))] 502 (match_operand:VI2 2 "register_operand" "v")))]
331 "TARGET_ALTIVEC" 503 "<VI_unit>"
332 "vaddu<VI_char>m %0,%1,%2" 504 "vaddu<VI_char>m %0,%1,%2"
333 [(set_attr "type" "vecsimple")]) 505 [(set_attr "type" "vecsimple")])
334 506
335 (define_insn "*altivec_addv4sf3" 507 (define_insn "*altivec_addv4sf3"
336 [(set (match_operand:V4SF 0 "register_operand" "=v") 508 [(set (match_operand:V4SF 0 "register_operand" "=v")
337 (plus:V4SF (match_operand:V4SF 1 "register_operand" "v") 509 (plus:V4SF (match_operand:V4SF 1 "register_operand" "v")
338 (match_operand:V4SF 2 "register_operand" "v")))] 510 (match_operand:V4SF 2 "register_operand" "v")))]
339 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)" 511 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
340 "vaddfp %0,%1,%2" 512 "vaddfp %0,%1,%2"
341 [(set_attr "type" "vecfloat")]) 513 [(set_attr "type" "vecfloat")])
342 514
343 (define_insn "altivec_vaddcuw" 515 (define_insn "altivec_vaddcuw"
344 [(set (match_operand:V4SI 0 "register_operand" "=v") 516 [(set (match_operand:V4SI 0 "register_operand" "=v")
345 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") 517 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
346 (match_operand:V4SI 2 "register_operand" "v")] 518 (match_operand:V4SI 2 "register_operand" "v")]
347 UNSPEC_VADDCUW))] 519 UNSPEC_VADDCUW))]
348 "TARGET_ALTIVEC" 520 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
349 "vaddcuw %0,%1,%2" 521 "vaddcuw %0,%1,%2"
350 [(set_attr "type" "vecsimple")]) 522 [(set_attr "type" "vecsimple")])
351 523
352 (define_insn "altivec_vaddu<VI_char>s" 524 (define_insn "altivec_vaddu<VI_char>s"
353 [(set (match_operand:VI 0 "register_operand" "=v") 525 [(set (match_operand:VI 0 "register_operand" "=v")
354 (unspec:VI [(match_operand:VI 1 "register_operand" "v") 526 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
355 (match_operand:VI 2 "register_operand" "v")] 527 (match_operand:VI 2 "register_operand" "v")]
356 UNSPEC_VADDU)) 528 UNSPEC_VADDU))
357 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 529 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
358 "TARGET_ALTIVEC" 530 "<VI_unit>"
359 "vaddu<VI_char>s %0,%1,%2" 531 "vaddu<VI_char>s %0,%1,%2"
360 [(set_attr "type" "vecsimple")]) 532 [(set_attr "type" "vecsimple")])
361 533
362 (define_insn "altivec_vadds<VI_char>s" 534 (define_insn "altivec_vadds<VI_char>s"
363 [(set (match_operand:VI 0 "register_operand" "=v") 535 [(set (match_operand:VI 0 "register_operand" "=v")
364 (unspec:VI [(match_operand:VI 1 "register_operand" "v") 536 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
365 (match_operand:VI 2 "register_operand" "v")] 537 (match_operand:VI 2 "register_operand" "v")]
366 UNSPEC_VADDS)) 538 UNSPEC_VADDS))
367 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 539 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
368 "TARGET_ALTIVEC" 540 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
369 "vadds<VI_char>s %0,%1,%2" 541 "vadds<VI_char>s %0,%1,%2"
370 [(set_attr "type" "vecsimple")]) 542 [(set_attr "type" "vecsimple")])
371 543
372 ;; sub 544 ;; sub
373 (define_insn "sub<mode>3" 545 (define_insn "sub<mode>3"
374 [(set (match_operand:VI 0 "register_operand" "=v") 546 [(set (match_operand:VI2 0 "register_operand" "=v")
375 (minus:VI (match_operand:VI 1 "register_operand" "v") 547 (minus:VI2 (match_operand:VI2 1 "register_operand" "v")
376 (match_operand:VI 2 "register_operand" "v")))] 548 (match_operand:VI2 2 "register_operand" "v")))]
377 "TARGET_ALTIVEC" 549 "<VI_unit>"
378 "vsubu<VI_char>m %0,%1,%2" 550 "vsubu<VI_char>m %0,%1,%2"
379 [(set_attr "type" "vecsimple")]) 551 [(set_attr "type" "vecsimple")])
380 552
381 (define_insn "*altivec_subv4sf3" 553 (define_insn "*altivec_subv4sf3"
382 [(set (match_operand:V4SF 0 "register_operand" "=v") 554 [(set (match_operand:V4SF 0 "register_operand" "=v")
389 (define_insn "altivec_vsubcuw" 561 (define_insn "altivec_vsubcuw"
390 [(set (match_operand:V4SI 0 "register_operand" "=v") 562 [(set (match_operand:V4SI 0 "register_operand" "=v")
391 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") 563 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
392 (match_operand:V4SI 2 "register_operand" "v")] 564 (match_operand:V4SI 2 "register_operand" "v")]
393 UNSPEC_VSUBCUW))] 565 UNSPEC_VSUBCUW))]
394 "TARGET_ALTIVEC" 566 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
395 "vsubcuw %0,%1,%2" 567 "vsubcuw %0,%1,%2"
396 [(set_attr "type" "vecsimple")]) 568 [(set_attr "type" "vecsimple")])
397 569
398 (define_insn "altivec_vsubu<VI_char>s" 570 (define_insn "altivec_vsubu<VI_char>s"
399 [(set (match_operand:VI 0 "register_operand" "=v") 571 [(set (match_operand:VI 0 "register_operand" "=v")
400 (unspec:VI [(match_operand:VI 1 "register_operand" "v") 572 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
401 (match_operand:VI 2 "register_operand" "v")] 573 (match_operand:VI 2 "register_operand" "v")]
402 UNSPEC_VSUBU)) 574 UNSPEC_VSUBU))
403 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 575 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
404 "TARGET_ALTIVEC" 576 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
405 "vsubu<VI_char>s %0,%1,%2" 577 "vsubu<VI_char>s %0,%1,%2"
406 [(set_attr "type" "vecsimple")]) 578 [(set_attr "type" "vecsimple")])
407 579
408 (define_insn "altivec_vsubs<VI_char>s" 580 (define_insn "altivec_vsubs<VI_char>s"
409 [(set (match_operand:VI 0 "register_operand" "=v") 581 [(set (match_operand:VI 0 "register_operand" "=v")
410 (unspec:VI [(match_operand:VI 1 "register_operand" "v") 582 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
411 (match_operand:VI 2 "register_operand" "v")] 583 (match_operand:VI 2 "register_operand" "v")]
412 UNSPEC_VSUBS)) 584 UNSPEC_VSUBS))
413 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 585 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
414 "TARGET_ALTIVEC" 586 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
415 "vsubs<VI_char>s %0,%1,%2" 587 "vsubs<VI_char>s %0,%1,%2"
416 [(set_attr "type" "vecsimple")]) 588 [(set_attr "type" "vecsimple")])
417 589
418 ;; 590 ;;
419 (define_insn "altivec_vavgu<VI_char>" 591 (define_insn "altivec_vavgu<VI_char>"
428 (define_insn "altivec_vavgs<VI_char>" 600 (define_insn "altivec_vavgs<VI_char>"
429 [(set (match_operand:VI 0 "register_operand" "=v") 601 [(set (match_operand:VI 0 "register_operand" "=v")
430 (unspec:VI [(match_operand:VI 1 "register_operand" "v") 602 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
431 (match_operand:VI 2 "register_operand" "v")] 603 (match_operand:VI 2 "register_operand" "v")]
432 UNSPEC_VAVGS))] 604 UNSPEC_VAVGS))]
433 "TARGET_ALTIVEC" 605 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
434 "vavgs<VI_char> %0,%1,%2" 606 "vavgs<VI_char> %0,%1,%2"
435 [(set_attr "type" "vecsimple")]) 607 [(set_attr "type" "vecsimple")])
436 608
437 (define_insn "altivec_vcmpbfp" 609 (define_insn "altivec_vcmpbfp"
438 [(set (match_operand:V4SI 0 "register_operand" "=v") 610 [(set (match_operand:V4SI 0 "register_operand" "=v")
439 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v") 611 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
440 (match_operand:V4SF 2 "register_operand" "v")] 612 (match_operand:V4SF 2 "register_operand" "v")]
441 UNSPEC_VCMPBFP))] 613 UNSPEC_VCMPBFP))]
442 "TARGET_ALTIVEC" 614 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
443 "vcmpbfp %0,%1,%2" 615 "vcmpbfp %0,%1,%2"
444 [(set_attr "type" "veccmp")]) 616 [(set_attr "type" "veccmp")])
445 617
446 (define_insn "*altivec_eq<mode>" 618 (define_insn "*altivec_eq<mode>"
447 [(set (match_operand:VI 0 "altivec_register_operand" "=v") 619 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
448 (eq:VI (match_operand:VI 1 "altivec_register_operand" "v") 620 (eq:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
449 (match_operand:VI 2 "altivec_register_operand" "v")))] 621 (match_operand:VI2 2 "altivec_register_operand" "v")))]
450 "TARGET_ALTIVEC" 622 "<VI_unit>"
451 "vcmpequ<VI_char> %0,%1,%2" 623 "vcmpequ<VI_char> %0,%1,%2"
452 [(set_attr "type" "veccmp")]) 624 [(set_attr "type" "veccmpfx")])
453 625
454 (define_insn "*altivec_gt<mode>" 626 (define_insn "*altivec_gt<mode>"
455 [(set (match_operand:VI 0 "altivec_register_operand" "=v") 627 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
456 (gt:VI (match_operand:VI 1 "altivec_register_operand" "v") 628 (gt:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
457 (match_operand:VI 2 "altivec_register_operand" "v")))] 629 (match_operand:VI2 2 "altivec_register_operand" "v")))]
458 "TARGET_ALTIVEC" 630 "<VI_unit>"
459 "vcmpgts<VI_char> %0,%1,%2" 631 "vcmpgts<VI_char> %0,%1,%2"
460 [(set_attr "type" "veccmp")]) 632 [(set_attr "type" "veccmpfx")])
461 633
462 (define_insn "*altivec_gtu<mode>" 634 (define_insn "*altivec_gtu<mode>"
463 [(set (match_operand:VI 0 "altivec_register_operand" "=v") 635 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
464 (gtu:VI (match_operand:VI 1 "altivec_register_operand" "v") 636 (gtu:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
465 (match_operand:VI 2 "altivec_register_operand" "v")))] 637 (match_operand:VI2 2 "altivec_register_operand" "v")))]
466 "TARGET_ALTIVEC" 638 "<VI_unit>"
467 "vcmpgtu<VI_char> %0,%1,%2" 639 "vcmpgtu<VI_char> %0,%1,%2"
468 [(set_attr "type" "veccmp")]) 640 [(set_attr "type" "veccmpfx")])
469 641
470 (define_insn "*altivec_eqv4sf" 642 (define_insn "*altivec_eqv4sf"
471 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v") 643 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
472 (eq:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v") 644 (eq:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
473 (match_operand:V4SF 2 "altivec_register_operand" "v")))] 645 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
493 665
494 (define_insn "*altivec_vsel<mode>" 666 (define_insn "*altivec_vsel<mode>"
495 [(set (match_operand:VM 0 "altivec_register_operand" "=v") 667 [(set (match_operand:VM 0 "altivec_register_operand" "=v")
496 (if_then_else:VM 668 (if_then_else:VM
497 (ne:CC (match_operand:VM 1 "altivec_register_operand" "v") 669 (ne:CC (match_operand:VM 1 "altivec_register_operand" "v")
498 (const_int 0)) 670 (match_operand:VM 4 "zero_constant" ""))
499 (match_operand:VM 2 "altivec_register_operand" "v") 671 (match_operand:VM 2 "altivec_register_operand" "v")
500 (match_operand:VM 3 "altivec_register_operand" "v")))] 672 (match_operand:VM 3 "altivec_register_operand" "v")))]
501 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)" 673 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
502 "vsel %0,%3,%2,%1" 674 "vsel %0,%3,%2,%1"
503 [(set_attr "type" "vecperm")]) 675 [(set_attr "type" "vecmove")])
504 676
505 (define_insn "*altivec_vsel<mode>_uns" 677 (define_insn "*altivec_vsel<mode>_uns"
506 [(set (match_operand:VM 0 "altivec_register_operand" "=v") 678 [(set (match_operand:VM 0 "altivec_register_operand" "=v")
507 (if_then_else:VM 679 (if_then_else:VM
508 (ne:CCUNS (match_operand:VM 1 "altivec_register_operand" "v") 680 (ne:CCUNS (match_operand:VM 1 "altivec_register_operand" "v")
509 (const_int 0)) 681 (match_operand:VM 4 "zero_constant" ""))
510 (match_operand:VM 2 "altivec_register_operand" "v") 682 (match_operand:VM 2 "altivec_register_operand" "v")
511 (match_operand:VM 3 "altivec_register_operand" "v")))] 683 (match_operand:VM 3 "altivec_register_operand" "v")))]
512 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)" 684 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
513 "vsel %0,%3,%2,%1" 685 "vsel %0,%3,%2,%1"
514 [(set_attr "type" "vecperm")]) 686 [(set_attr "type" "vecmove")])
515 687
516 ;; Fused multiply add. 688 ;; Fused multiply add.
517 689
518 (define_insn "*altivec_fmav4sf4" 690 (define_insn "*altivec_fmav4sf4"
519 [(set (match_operand:V4SF 0 "register_operand" "=v") 691 [(set (match_operand:V4SF 0 "register_operand" "=v")
552 724
553 ;; (define_insn "mulv4si3" 725 ;; (define_insn "mulv4si3"
554 ;; [(set (match_operand:V4SI 0 "register_operand" "=v") 726 ;; [(set (match_operand:V4SI 0 "register_operand" "=v")
555 ;; (mult:V4SI (match_operand:V4SI 1 "register_operand" "v") 727 ;; (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
556 ;; (match_operand:V4SI 2 "register_operand" "v")))] 728 ;; (match_operand:V4SI 2 "register_operand" "v")))]
729 (define_insn "mulv4si3_p8"
730 [(set (match_operand:V4SI 0 "register_operand" "=v")
731 (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
732 (match_operand:V4SI 2 "register_operand" "v")))]
733 "TARGET_P8_VECTOR"
734 "vmuluwm %0,%1,%2"
735 [(set_attr "type" "veccomplex")])
736
557 (define_expand "mulv4si3" 737 (define_expand "mulv4si3"
558 [(use (match_operand:V4SI 0 "register_operand" "")) 738 [(use (match_operand:V4SI 0 "register_operand" ""))
559 (use (match_operand:V4SI 1 "register_operand" "")) 739 (use (match_operand:V4SI 1 "register_operand" ""))
560 (use (match_operand:V4SI 2 "register_operand" ""))] 740 (use (match_operand:V4SI 2 "register_operand" ""))]
561 "TARGET_ALTIVEC" 741 "TARGET_ALTIVEC"
562 " 742 {
563 { 743 rtx zero;
564 rtx zero; 744 rtx swap;
565 rtx swap; 745 rtx small_swap;
566 rtx small_swap; 746 rtx sixteen;
567 rtx sixteen; 747 rtx one;
568 rtx one; 748 rtx two;
569 rtx two; 749 rtx low_product;
570 rtx low_product; 750 rtx high_product;
571 rtx high_product;
572 751
573 zero = gen_reg_rtx (V4SImode); 752 if (TARGET_P8_VECTOR)
574 emit_insn (gen_altivec_vspltisw (zero, const0_rtx)); 753 {
754 emit_insn (gen_mulv4si3_p8 (operands[0], operands[1], operands[2]));
755 DONE;
756 }
757
758 zero = gen_reg_rtx (V4SImode);
759 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
575 760
576 sixteen = gen_reg_rtx (V4SImode); 761 sixteen = gen_reg_rtx (V4SImode);
577 emit_insn (gen_altivec_vspltisw (sixteen, gen_rtx_CONST_INT (V4SImode, -16))); 762 emit_insn (gen_altivec_vspltisw (sixteen, gen_rtx_CONST_INT (V4SImode, -16)));
578 763
579 swap = gen_reg_rtx (V4SImode); 764 swap = gen_reg_rtx (V4SImode);
580 emit_insn (gen_vrotlv4si3 (swap, operands[2], sixteen)); 765 emit_insn (gen_vrotlv4si3 (swap, operands[2], sixteen));
581 766
582 one = gen_reg_rtx (V8HImode); 767 one = gen_reg_rtx (V8HImode);
583 convert_move (one, operands[1], 0); 768 convert_move (one, operands[1], 0);
584 769
585 two = gen_reg_rtx (V8HImode); 770 two = gen_reg_rtx (V8HImode);
586 convert_move (two, operands[2], 0); 771 convert_move (two, operands[2], 0);
587 772
588 small_swap = gen_reg_rtx (V8HImode); 773 small_swap = gen_reg_rtx (V8HImode);
589 convert_move (small_swap, swap, 0); 774 convert_move (small_swap, swap, 0);
590 775
591 low_product = gen_reg_rtx (V4SImode); 776 low_product = gen_reg_rtx (V4SImode);
592 emit_insn (gen_altivec_vmulouh (low_product, one, two)); 777 emit_insn (gen_altivec_vmulouh (low_product, one, two));
593 778
594 high_product = gen_reg_rtx (V4SImode); 779 high_product = gen_reg_rtx (V4SImode);
595 emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero)); 780 emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero));
596 781
597 emit_insn (gen_vashlv4si3 (high_product, high_product, sixteen)); 782 emit_insn (gen_vashlv4si3 (high_product, high_product, sixteen));
598 783
599 emit_insn (gen_addv4si3 (operands[0], high_product, low_product)); 784 emit_insn (gen_addv4si3 (operands[0], high_product, low_product));
600 785
601 DONE; 786 DONE;
602 }") 787 })
603 788
604 (define_expand "mulv8hi3" 789 (define_expand "mulv8hi3"
605 [(use (match_operand:V8HI 0 "register_operand" "")) 790 [(use (match_operand:V8HI 0 "register_operand" ""))
606 (use (match_operand:V8HI 1 "register_operand" "")) 791 (use (match_operand:V8HI 1 "register_operand" ""))
607 (use (match_operand:V8HI 2 "register_operand" ""))] 792 (use (match_operand:V8HI 2 "register_operand" ""))]
608 "TARGET_ALTIVEC" 793 "TARGET_ALTIVEC"
609 " 794 {
610 { 795 rtx zero = gen_reg_rtx (V8HImode);
611 rtx odd = gen_reg_rtx (V4SImode); 796
612 rtx even = gen_reg_rtx (V4SImode); 797 emit_insn (gen_altivec_vspltish (zero, const0_rtx));
613 rtx high = gen_reg_rtx (V4SImode); 798 emit_insn (gen_altivec_vmladduhm(operands[0], operands[1], operands[2], zero));
614 rtx low = gen_reg_rtx (V4SImode); 799
615 800 DONE;
616 emit_insn (gen_altivec_vmulesh (even, operands[1], operands[2])); 801 })
617 emit_insn (gen_altivec_vmulosh (odd, operands[1], operands[2]));
618
619 emit_insn (gen_altivec_vmrghw (high, even, odd));
620 emit_insn (gen_altivec_vmrglw (low, even, odd));
621
622 emit_insn (gen_altivec_vpkuwum (operands[0], high, low));
623
624 DONE;
625 }")
626 802
627 ;; Fused multiply subtract 803 ;; Fused multiply subtract
628 (define_insn "*altivec_vnmsubfp" 804 (define_insn "*altivec_vnmsubfp"
629 [(set (match_operand:V4SF 0 "register_operand" "=v") 805 [(set (match_operand:V4SF 0 "register_operand" "=v")
630 (neg:V4SF 806 (neg:V4SF
670 [(set (match_operand:V4SI 0 "register_operand" "=v") 846 [(set (match_operand:V4SI 0 "register_operand" "=v")
671 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v") 847 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
672 (match_operand:V8HI 2 "register_operand" "v") 848 (match_operand:V8HI 2 "register_operand" "v")
673 (match_operand:V4SI 3 "register_operand" "v")] 849 (match_operand:V4SI 3 "register_operand" "v")]
674 UNSPEC_VMSUMUHS)) 850 UNSPEC_VMSUMUHS))
675 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 851 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
676 "TARGET_ALTIVEC" 852 "TARGET_ALTIVEC"
677 "vmsumuhs %0,%1,%2,%3" 853 "vmsumuhs %0,%1,%2,%3"
678 [(set_attr "type" "veccomplex")]) 854 [(set_attr "type" "veccomplex")])
679 855
680 (define_insn "altivec_vmsumshs" 856 (define_insn "altivec_vmsumshs"
681 [(set (match_operand:V4SI 0 "register_operand" "=v") 857 [(set (match_operand:V4SI 0 "register_operand" "=v")
682 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v") 858 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
683 (match_operand:V8HI 2 "register_operand" "v") 859 (match_operand:V8HI 2 "register_operand" "v")
684 (match_operand:V4SI 3 "register_operand" "v")] 860 (match_operand:V4SI 3 "register_operand" "v")]
685 UNSPEC_VMSUMSHS)) 861 UNSPEC_VMSUMSHS))
686 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 862 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
687 "TARGET_ALTIVEC" 863 "TARGET_ALTIVEC"
688 "vmsumshs %0,%1,%2,%3" 864 "vmsumshs %0,%1,%2,%3"
689 [(set_attr "type" "veccomplex")]) 865 [(set_attr "type" "veccomplex")])
690 866
691 ;; max 867 ;; max
692 868
693 (define_insn "umax<mode>3" 869 (define_insn "umax<mode>3"
694 [(set (match_operand:VI 0 "register_operand" "=v") 870 [(set (match_operand:VI2 0 "register_operand" "=v")
695 (umax:VI (match_operand:VI 1 "register_operand" "v") 871 (umax:VI2 (match_operand:VI2 1 "register_operand" "v")
696 (match_operand:VI 2 "register_operand" "v")))] 872 (match_operand:VI2 2 "register_operand" "v")))]
697 "TARGET_ALTIVEC" 873 "<VI_unit>"
698 "vmaxu<VI_char> %0,%1,%2" 874 "vmaxu<VI_char> %0,%1,%2"
699 [(set_attr "type" "vecsimple")]) 875 [(set_attr "type" "vecsimple")])
700 876
701 (define_insn "smax<mode>3" 877 (define_insn "smax<mode>3"
702 [(set (match_operand:VI 0 "register_operand" "=v") 878 [(set (match_operand:VI2 0 "register_operand" "=v")
703 (smax:VI (match_operand:VI 1 "register_operand" "v") 879 (smax:VI2 (match_operand:VI2 1 "register_operand" "v")
704 (match_operand:VI 2 "register_operand" "v")))] 880 (match_operand:VI2 2 "register_operand" "v")))]
705 "TARGET_ALTIVEC" 881 "<VI_unit>"
706 "vmaxs<VI_char> %0,%1,%2" 882 "vmaxs<VI_char> %0,%1,%2"
707 [(set_attr "type" "vecsimple")]) 883 [(set_attr "type" "vecsimple")])
708 884
709 (define_insn "*altivec_smaxv4sf3" 885 (define_insn "*altivec_smaxv4sf3"
710 [(set (match_operand:V4SF 0 "register_operand" "=v") 886 [(set (match_operand:V4SF 0 "register_operand" "=v")
713 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)" 889 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
714 "vmaxfp %0,%1,%2" 890 "vmaxfp %0,%1,%2"
715 [(set_attr "type" "veccmp")]) 891 [(set_attr "type" "veccmp")])
716 892
717 (define_insn "umin<mode>3" 893 (define_insn "umin<mode>3"
718 [(set (match_operand:VI 0 "register_operand" "=v") 894 [(set (match_operand:VI2 0 "register_operand" "=v")
719 (umin:VI (match_operand:VI 1 "register_operand" "v") 895 (umin:VI2 (match_operand:VI2 1 "register_operand" "v")
720 (match_operand:VI 2 "register_operand" "v")))] 896 (match_operand:VI2 2 "register_operand" "v")))]
721 "TARGET_ALTIVEC" 897 "<VI_unit>"
722 "vminu<VI_char> %0,%1,%2" 898 "vminu<VI_char> %0,%1,%2"
723 [(set_attr "type" "vecsimple")]) 899 [(set_attr "type" "vecsimple")])
724 900
725 (define_insn "smin<mode>3" 901 (define_insn "smin<mode>3"
726 [(set (match_operand:VI 0 "register_operand" "=v") 902 [(set (match_operand:VI2 0 "register_operand" "=v")
727 (smin:VI (match_operand:VI 1 "register_operand" "v") 903 (smin:VI2 (match_operand:VI2 1 "register_operand" "v")
728 (match_operand:VI 2 "register_operand" "v")))] 904 (match_operand:VI2 2 "register_operand" "v")))]
729 "TARGET_ALTIVEC" 905 "<VI_unit>"
730 "vmins<VI_char> %0,%1,%2" 906 "vmins<VI_char> %0,%1,%2"
731 [(set_attr "type" "vecsimple")]) 907 [(set_attr "type" "vecsimple")])
732 908
733 (define_insn "*altivec_sminv4sf3" 909 (define_insn "*altivec_sminv4sf3"
734 [(set (match_operand:V4SF 0 "register_operand" "=v") 910 [(set (match_operand:V4SF 0 "register_operand" "=v")
742 [(set (match_operand:V8HI 0 "register_operand" "=v") 918 [(set (match_operand:V8HI 0 "register_operand" "=v")
743 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v") 919 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
744 (match_operand:V8HI 2 "register_operand" "v") 920 (match_operand:V8HI 2 "register_operand" "v")
745 (match_operand:V8HI 3 "register_operand" "v")] 921 (match_operand:V8HI 3 "register_operand" "v")]
746 UNSPEC_VMHADDSHS)) 922 UNSPEC_VMHADDSHS))
747 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 923 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
748 "TARGET_ALTIVEC" 924 "TARGET_ALTIVEC"
749 "vmhaddshs %0,%1,%2,%3" 925 "vmhaddshs %0,%1,%2,%3"
750 [(set_attr "type" "veccomplex")]) 926 [(set_attr "type" "veccomplex")])
751 927
752 (define_insn "altivec_vmhraddshs" 928 (define_insn "altivec_vmhraddshs"
753 [(set (match_operand:V8HI 0 "register_operand" "=v") 929 [(set (match_operand:V8HI 0 "register_operand" "=v")
754 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v") 930 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
755 (match_operand:V8HI 2 "register_operand" "v") 931 (match_operand:V8HI 2 "register_operand" "v")
756 (match_operand:V8HI 3 "register_operand" "v")] 932 (match_operand:V8HI 3 "register_operand" "v")]
757 UNSPEC_VMHRADDSHS)) 933 UNSPEC_VMHRADDSHS))
758 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 934 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
759 "TARGET_ALTIVEC" 935 "TARGET_ALTIVEC"
760 "vmhraddshs %0,%1,%2,%3" 936 "vmhraddshs %0,%1,%2,%3"
761 [(set_attr "type" "veccomplex")]) 937 [(set_attr "type" "veccomplex")])
762 938
763 (define_insn "altivec_vmladduhm" 939 (define_insn "altivec_vmladduhm"
764 [(set (match_operand:V8HI 0 "register_operand" "=v") 940 [(set (match_operand:V8HI 0 "register_operand" "=v")
765 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v") 941 (plus:V8HI (mult:V8HI (match_operand:V8HI 1 "register_operand" "v")
766 (match_operand:V8HI 2 "register_operand" "v") 942 (match_operand:V8HI 2 "register_operand" "v"))
767 (match_operand:V8HI 3 "register_operand" "v")] 943 (match_operand:V8HI 3 "register_operand" "v")))]
768 UNSPEC_VMLADDUHM))]
769 "TARGET_ALTIVEC" 944 "TARGET_ALTIVEC"
770 "vmladduhm %0,%1,%2,%3" 945 "vmladduhm %0,%1,%2,%3"
771 [(set_attr "type" "veccomplex")]) 946 [(set_attr "type" "veccomplex")])
772 947
773 (define_insn "altivec_vmrghb" 948 (define_expand "altivec_vmrghb"
949 [(use (match_operand:V16QI 0 "register_operand" ""))
950 (use (match_operand:V16QI 1 "register_operand" ""))
951 (use (match_operand:V16QI 2 "register_operand" ""))]
952 "TARGET_ALTIVEC"
953 {
954 rtvec v;
955 rtx x;
956
957 /* Special handling for LE with -maltivec=be. */
958 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
959 {
960 v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
961 GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
962 GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
963 GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
964 x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
965 }
966 else
967 {
968 v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
969 GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
970 GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
971 GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
972 x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
973 }
974
975 x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
976 emit_insn (gen_rtx_SET (operands[0], x));
977 DONE;
978 })
979
980 (define_insn "*altivec_vmrghb_internal"
774 [(set (match_operand:V16QI 0 "register_operand" "=v") 981 [(set (match_operand:V16QI 0 "register_operand" "=v")
775 (vec_merge:V16QI (vec_select:V16QI (match_operand:V16QI 1 "register_operand" "v") 982 (vec_select:V16QI
776 (parallel [(const_int 0) 983 (vec_concat:V32QI
777 (const_int 8) 984 (match_operand:V16QI 1 "register_operand" "v")
778 (const_int 1) 985 (match_operand:V16QI 2 "register_operand" "v"))
779 (const_int 9) 986 (parallel [(const_int 0) (const_int 16)
780 (const_int 2) 987 (const_int 1) (const_int 17)
781 (const_int 10) 988 (const_int 2) (const_int 18)
782 (const_int 3) 989 (const_int 3) (const_int 19)
783 (const_int 11) 990 (const_int 4) (const_int 20)
784 (const_int 4) 991 (const_int 5) (const_int 21)
785 (const_int 12) 992 (const_int 6) (const_int 22)
786 (const_int 5) 993 (const_int 7) (const_int 23)])))]
787 (const_int 13) 994 "TARGET_ALTIVEC"
788 (const_int 6) 995 {
789 (const_int 14) 996 if (BYTES_BIG_ENDIAN)
790 (const_int 7) 997 return "vmrghb %0,%1,%2";
791 (const_int 15)])) 998 else
792 (vec_select:V16QI (match_operand:V16QI 2 "register_operand" "v") 999 return "vmrglb %0,%2,%1";
793 (parallel [(const_int 8) 1000 }
794 (const_int 0) 1001 [(set_attr "type" "vecperm")])
795 (const_int 9) 1002
796 (const_int 1) 1003 (define_insn "altivec_vmrghb_direct"
797 (const_int 10) 1004 [(set (match_operand:V16QI 0 "register_operand" "=v")
798 (const_int 2) 1005 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
799 (const_int 11) 1006 (match_operand:V16QI 2 "register_operand" "v")]
800 (const_int 3) 1007 UNSPEC_VMRGH_DIRECT))]
801 (const_int 12)
802 (const_int 4)
803 (const_int 13)
804 (const_int 5)
805 (const_int 14)
806 (const_int 6)
807 (const_int 15)
808 (const_int 7)]))
809 (const_int 21845)))]
810 "TARGET_ALTIVEC" 1008 "TARGET_ALTIVEC"
811 "vmrghb %0,%1,%2" 1009 "vmrghb %0,%1,%2"
812 [(set_attr "type" "vecperm")]) 1010 [(set_attr "type" "vecperm")])
813 1011
814 (define_insn "altivec_vmrghh" 1012 (define_expand "altivec_vmrghh"
1013 [(use (match_operand:V8HI 0 "register_operand" ""))
1014 (use (match_operand:V8HI 1 "register_operand" ""))
1015 (use (match_operand:V8HI 2 "register_operand" ""))]
1016 "TARGET_ALTIVEC"
1017 {
1018 rtvec v;
1019 rtx x;
1020
1021 /* Special handling for LE with -maltivec=be. */
1022 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1023 {
1024 v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
1025 GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
1026 x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
1027 }
1028 else
1029 {
1030 v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
1031 GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
1032 x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1033 }
1034
1035 x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1036 emit_insn (gen_rtx_SET (operands[0], x));
1037 DONE;
1038 })
1039
1040 (define_insn "*altivec_vmrghh_internal"
815 [(set (match_operand:V8HI 0 "register_operand" "=v") 1041 [(set (match_operand:V8HI 0 "register_operand" "=v")
816 (vec_merge:V8HI (vec_select:V8HI (match_operand:V8HI 1 "register_operand" "v") 1042 (vec_select:V8HI
817 (parallel [(const_int 0) 1043 (vec_concat:V16HI
818 (const_int 4) 1044 (match_operand:V8HI 1 "register_operand" "v")
819 (const_int 1) 1045 (match_operand:V8HI 2 "register_operand" "v"))
820 (const_int 5) 1046 (parallel [(const_int 0) (const_int 8)
821 (const_int 2) 1047 (const_int 1) (const_int 9)
822 (const_int 6) 1048 (const_int 2) (const_int 10)
823 (const_int 3) 1049 (const_int 3) (const_int 11)])))]
824 (const_int 7)])) 1050 "TARGET_ALTIVEC"
825 (vec_select:V8HI (match_operand:V8HI 2 "register_operand" "v") 1051 {
826 (parallel [(const_int 4) 1052 if (BYTES_BIG_ENDIAN)
827 (const_int 0) 1053 return "vmrghh %0,%1,%2";
828 (const_int 5) 1054 else
829 (const_int 1) 1055 return "vmrglh %0,%2,%1";
830 (const_int 6) 1056 }
831 (const_int 2) 1057 [(set_attr "type" "vecperm")])
832 (const_int 7) 1058
833 (const_int 3)])) 1059 (define_insn "altivec_vmrghh_direct"
834 (const_int 85)))] 1060 [(set (match_operand:V8HI 0 "register_operand" "=v")
1061 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1062 (match_operand:V8HI 2 "register_operand" "v")]
1063 UNSPEC_VMRGH_DIRECT))]
835 "TARGET_ALTIVEC" 1064 "TARGET_ALTIVEC"
836 "vmrghh %0,%1,%2" 1065 "vmrghh %0,%1,%2"
837 [(set_attr "type" "vecperm")]) 1066 [(set_attr "type" "vecperm")])
838 1067
839 (define_insn "altivec_vmrghw" 1068 (define_expand "altivec_vmrghw"
1069 [(use (match_operand:V4SI 0 "register_operand" ""))
1070 (use (match_operand:V4SI 1 "register_operand" ""))
1071 (use (match_operand:V4SI 2 "register_operand" ""))]
1072 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1073 {
1074 rtvec v;
1075 rtx x;
1076
1077 /* Special handling for LE with -maltivec=be. */
1078 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1079 {
1080 v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1081 x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
1082 }
1083 else
1084 {
1085 v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1086 x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1087 }
1088
1089 x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1090 emit_insn (gen_rtx_SET (operands[0], x));
1091 DONE;
1092 })
1093
1094 (define_insn "*altivec_vmrghw_internal"
840 [(set (match_operand:V4SI 0 "register_operand" "=v") 1095 [(set (match_operand:V4SI 0 "register_operand" "=v")
841 (vec_merge:V4SI (vec_select:V4SI (match_operand:V4SI 1 "register_operand" "v") 1096 (vec_select:V4SI
842 (parallel [(const_int 0) 1097 (vec_concat:V8SI
843 (const_int 2) 1098 (match_operand:V4SI 1 "register_operand" "v")
844 (const_int 1) 1099 (match_operand:V4SI 2 "register_operand" "v"))
845 (const_int 3)])) 1100 (parallel [(const_int 0) (const_int 4)
846 (vec_select:V4SI (match_operand:V4SI 2 "register_operand" "v") 1101 (const_int 1) (const_int 5)])))]
847 (parallel [(const_int 2)
848 (const_int 0)
849 (const_int 3)
850 (const_int 1)]))
851 (const_int 5)))]
852 "VECTOR_MEM_ALTIVEC_P (V4SImode)" 1102 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1103 {
1104 if (BYTES_BIG_ENDIAN)
1105 return "vmrghw %0,%1,%2";
1106 else
1107 return "vmrglw %0,%2,%1";
1108 }
1109 [(set_attr "type" "vecperm")])
1110
1111 (define_insn "altivec_vmrghw_direct"
1112 [(set (match_operand:V4SI 0 "register_operand" "=v")
1113 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1114 (match_operand:V4SI 2 "register_operand" "v")]
1115 UNSPEC_VMRGH_DIRECT))]
1116 "TARGET_ALTIVEC"
853 "vmrghw %0,%1,%2" 1117 "vmrghw %0,%1,%2"
854 [(set_attr "type" "vecperm")]) 1118 [(set_attr "type" "vecperm")])
855 1119
856 (define_insn "*altivec_vmrghsf" 1120 (define_insn "*altivec_vmrghsf"
857 [(set (match_operand:V4SF 0 "register_operand" "=v") 1121 [(set (match_operand:V4SF 0 "register_operand" "=v")
858 (vec_merge:V4SF (vec_select:V4SF (match_operand:V4SF 1 "register_operand" "v") 1122 (vec_select:V4SF
859 (parallel [(const_int 0) 1123 (vec_concat:V8SF
860 (const_int 2) 1124 (match_operand:V4SF 1 "register_operand" "v")
861 (const_int 1) 1125 (match_operand:V4SF 2 "register_operand" "v"))
862 (const_int 3)])) 1126 (parallel [(const_int 0) (const_int 4)
863 (vec_select:V4SF (match_operand:V4SF 2 "register_operand" "v") 1127 (const_int 1) (const_int 5)])))]
864 (parallel [(const_int 2)
865 (const_int 0)
866 (const_int 3)
867 (const_int 1)]))
868 (const_int 5)))]
869 "VECTOR_MEM_ALTIVEC_P (V4SFmode)" 1128 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
870 "vmrghw %0,%1,%2" 1129 {
1130 if (BYTES_BIG_ENDIAN)
1131 return "vmrghw %0,%1,%2";
1132 else
1133 return "vmrglw %0,%2,%1";
1134 }
871 [(set_attr "type" "vecperm")]) 1135 [(set_attr "type" "vecperm")])
872 1136
873 (define_insn "altivec_vmrglb" 1137 (define_expand "altivec_vmrglb"
1138 [(use (match_operand:V16QI 0 "register_operand" ""))
1139 (use (match_operand:V16QI 1 "register_operand" ""))
1140 (use (match_operand:V16QI 2 "register_operand" ""))]
1141 "TARGET_ALTIVEC"
1142 {
1143 rtvec v;
1144 rtx x;
1145
1146 /* Special handling for LE with -maltivec=be. */
1147 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1148 {
1149 v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
1150 GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
1151 GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
1152 GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
1153 x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
1154 }
1155 else
1156 {
1157 v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
1158 GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
1159 GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
1160 GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
1161 x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
1162 }
1163
1164 x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1165 emit_insn (gen_rtx_SET (operands[0], x));
1166 DONE;
1167 })
1168
1169 (define_insn "*altivec_vmrglb_internal"
874 [(set (match_operand:V16QI 0 "register_operand" "=v") 1170 [(set (match_operand:V16QI 0 "register_operand" "=v")
875 (vec_merge:V16QI (vec_select:V16QI (match_operand:V16QI 1 "register_operand" "v") 1171 (vec_select:V16QI
876 (parallel [(const_int 8) 1172 (vec_concat:V32QI
877 (const_int 0) 1173 (match_operand:V16QI 1 "register_operand" "v")
878 (const_int 9) 1174 (match_operand:V16QI 2 "register_operand" "v"))
879 (const_int 1) 1175 (parallel [(const_int 8) (const_int 24)
880 (const_int 10) 1176 (const_int 9) (const_int 25)
881 (const_int 2) 1177 (const_int 10) (const_int 26)
882 (const_int 11) 1178 (const_int 11) (const_int 27)
883 (const_int 3) 1179 (const_int 12) (const_int 28)
884 (const_int 12) 1180 (const_int 13) (const_int 29)
885 (const_int 4) 1181 (const_int 14) (const_int 30)
886 (const_int 13) 1182 (const_int 15) (const_int 31)])))]
887 (const_int 5) 1183 "TARGET_ALTIVEC"
888 (const_int 14) 1184 {
889 (const_int 6) 1185 if (BYTES_BIG_ENDIAN)
890 (const_int 15) 1186 return "vmrglb %0,%1,%2";
891 (const_int 7)])) 1187 else
892 (vec_select:V16QI (match_operand:V16QI 2 "register_operand" "v") 1188 return "vmrghb %0,%2,%1";
893 (parallel [(const_int 0) 1189 }
894 (const_int 8) 1190 [(set_attr "type" "vecperm")])
895 (const_int 1) 1191
896 (const_int 9) 1192 (define_insn "altivec_vmrglb_direct"
897 (const_int 2) 1193 [(set (match_operand:V16QI 0 "register_operand" "=v")
898 (const_int 10) 1194 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
899 (const_int 3) 1195 (match_operand:V16QI 2 "register_operand" "v")]
900 (const_int 11) 1196 UNSPEC_VMRGL_DIRECT))]
901 (const_int 4)
902 (const_int 12)
903 (const_int 5)
904 (const_int 13)
905 (const_int 6)
906 (const_int 14)
907 (const_int 7)
908 (const_int 15)]))
909 (const_int 21845)))]
910 "TARGET_ALTIVEC" 1197 "TARGET_ALTIVEC"
911 "vmrglb %0,%1,%2" 1198 "vmrglb %0,%1,%2"
912 [(set_attr "type" "vecperm")]) 1199 [(set_attr "type" "vecperm")])
913 1200
914 (define_insn "altivec_vmrglh" 1201 (define_expand "altivec_vmrglh"
1202 [(use (match_operand:V8HI 0 "register_operand" ""))
1203 (use (match_operand:V8HI 1 "register_operand" ""))
1204 (use (match_operand:V8HI 2 "register_operand" ""))]
1205 "TARGET_ALTIVEC"
1206 {
1207 rtvec v;
1208 rtx x;
1209
1210 /* Special handling for LE with -maltivec=be. */
1211 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1212 {
1213 v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
1214 GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
1215 x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
1216 }
1217 else
1218 {
1219 v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
1220 GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
1221 x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
1222 }
1223
1224 x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1225 emit_insn (gen_rtx_SET (operands[0], x));
1226 DONE;
1227 })
1228
1229 (define_insn "*altivec_vmrglh_internal"
915 [(set (match_operand:V8HI 0 "register_operand" "=v") 1230 [(set (match_operand:V8HI 0 "register_operand" "=v")
916 (vec_merge:V8HI (vec_select:V8HI (match_operand:V8HI 1 "register_operand" "v") 1231 (vec_select:V8HI
917 (parallel [(const_int 4) 1232 (vec_concat:V16HI
918 (const_int 0) 1233 (match_operand:V8HI 1 "register_operand" "v")
919 (const_int 5) 1234 (match_operand:V8HI 2 "register_operand" "v"))
920 (const_int 1) 1235 (parallel [(const_int 4) (const_int 12)
921 (const_int 6) 1236 (const_int 5) (const_int 13)
922 (const_int 2) 1237 (const_int 6) (const_int 14)
923 (const_int 7) 1238 (const_int 7) (const_int 15)])))]
924 (const_int 3)])) 1239 "TARGET_ALTIVEC"
925 (vec_select:V8HI (match_operand:V8HI 2 "register_operand" "v") 1240 {
926 (parallel [(const_int 0) 1241 if (BYTES_BIG_ENDIAN)
927 (const_int 4) 1242 return "vmrglh %0,%1,%2";
928 (const_int 1) 1243 else
929 (const_int 5) 1244 return "vmrghh %0,%2,%1";
930 (const_int 2) 1245 }
931 (const_int 6) 1246 [(set_attr "type" "vecperm")])
932 (const_int 3) 1247
933 (const_int 7)])) 1248 (define_insn "altivec_vmrglh_direct"
934 (const_int 85)))] 1249 [(set (match_operand:V8HI 0 "register_operand" "=v")
1250 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1251 (match_operand:V8HI 2 "register_operand" "v")]
1252 UNSPEC_VMRGL_DIRECT))]
935 "TARGET_ALTIVEC" 1253 "TARGET_ALTIVEC"
936 "vmrglh %0,%1,%2" 1254 "vmrglh %0,%1,%2"
937 [(set_attr "type" "vecperm")]) 1255 [(set_attr "type" "vecperm")])
938 1256
939 (define_insn "altivec_vmrglw" 1257 (define_expand "altivec_vmrglw"
1258 [(use (match_operand:V4SI 0 "register_operand" ""))
1259 (use (match_operand:V4SI 1 "register_operand" ""))
1260 (use (match_operand:V4SI 2 "register_operand" ""))]
1261 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1262 {
1263 rtvec v;
1264 rtx x;
1265
1266 /* Special handling for LE with -maltivec=be. */
1267 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1268 {
1269 v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
1270 x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
1271 }
1272 else
1273 {
1274 v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
1275 x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
1276 }
1277
1278 x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
1279 emit_insn (gen_rtx_SET (operands[0], x));
1280 DONE;
1281 })
1282
1283 (define_insn "*altivec_vmrglw_internal"
940 [(set (match_operand:V4SI 0 "register_operand" "=v") 1284 [(set (match_operand:V4SI 0 "register_operand" "=v")
941 (vec_merge:V4SI 1285 (vec_select:V4SI
942 (vec_select:V4SI (match_operand:V4SI 1 "register_operand" "v") 1286 (vec_concat:V8SI
943 (parallel [(const_int 2) 1287 (match_operand:V4SI 1 "register_operand" "v")
944 (const_int 0) 1288 (match_operand:V4SI 2 "register_operand" "v"))
945 (const_int 3) 1289 (parallel [(const_int 2) (const_int 6)
946 (const_int 1)])) 1290 (const_int 3) (const_int 7)])))]
947 (vec_select:V4SI (match_operand:V4SI 2 "register_operand" "v")
948 (parallel [(const_int 0)
949 (const_int 2)
950 (const_int 1)
951 (const_int 3)]))
952 (const_int 5)))]
953 "VECTOR_MEM_ALTIVEC_P (V4SImode)" 1291 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1292 {
1293 if (BYTES_BIG_ENDIAN)
1294 return "vmrglw %0,%1,%2";
1295 else
1296 return "vmrghw %0,%2,%1";
1297 }
1298 [(set_attr "type" "vecperm")])
1299
1300 (define_insn "altivec_vmrglw_direct"
1301 [(set (match_operand:V4SI 0 "register_operand" "=v")
1302 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1303 (match_operand:V4SI 2 "register_operand" "v")]
1304 UNSPEC_VMRGL_DIRECT))]
1305 "TARGET_ALTIVEC"
954 "vmrglw %0,%1,%2" 1306 "vmrglw %0,%1,%2"
955 [(set_attr "type" "vecperm")]) 1307 [(set_attr "type" "vecperm")])
956 1308
957 (define_insn "*altivec_vmrglsf" 1309 (define_insn "*altivec_vmrglsf"
958 [(set (match_operand:V4SF 0 "register_operand" "=v") 1310 [(set (match_operand:V4SF 0 "register_operand" "=v")
959 (vec_merge:V4SF 1311 (vec_select:V4SF
960 (vec_select:V4SF (match_operand:V4SF 1 "register_operand" "v") 1312 (vec_concat:V8SF
961 (parallel [(const_int 2) 1313 (match_operand:V4SF 1 "register_operand" "v")
962 (const_int 0) 1314 (match_operand:V4SF 2 "register_operand" "v"))
963 (const_int 3) 1315 (parallel [(const_int 2) (const_int 6)
964 (const_int 1)])) 1316 (const_int 3) (const_int 7)])))]
965 (vec_select:V4SF (match_operand:V4SF 2 "register_operand" "v")
966 (parallel [(const_int 0)
967 (const_int 2)
968 (const_int 1)
969 (const_int 3)]))
970 (const_int 5)))]
971 "VECTOR_MEM_ALTIVEC_P (V4SFmode)" 1317 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
972 "vmrglw %0,%1,%2" 1318 {
1319 if (BYTES_BIG_ENDIAN)
1320 return "vmrglw %0,%1,%2";
1321 else
1322 return "vmrghw %0,%2,%1";
1323 }
973 [(set_attr "type" "vecperm")]) 1324 [(set_attr "type" "vecperm")])
1325
1326 ;; Power8 vector merge two V4SF/V4SI even words to V4SF
1327 (define_insn "p8_vmrgew_<mode>"
1328 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1329 (vec_select:VSX_W
1330 (vec_concat:<VS_double>
1331 (match_operand:VSX_W 1 "register_operand" "v")
1332 (match_operand:VSX_W 2 "register_operand" "v"))
1333 (parallel [(const_int 0) (const_int 4)
1334 (const_int 2) (const_int 6)])))]
1335 "TARGET_P8_VECTOR"
1336 {
1337 if (BYTES_BIG_ENDIAN)
1338 return "vmrgew %0,%1,%2";
1339 else
1340 return "vmrgow %0,%2,%1";
1341 }
1342 [(set_attr "type" "vecperm")])
1343
1344 (define_insn "p8_vmrgow"
1345 [(set (match_operand:V4SI 0 "register_operand" "=v")
1346 (vec_select:V4SI
1347 (vec_concat:V8SI
1348 (match_operand:V4SI 1 "register_operand" "v")
1349 (match_operand:V4SI 2 "register_operand" "v"))
1350 (parallel [(const_int 1) (const_int 5)
1351 (const_int 3) (const_int 7)])))]
1352 "TARGET_P8_VECTOR"
1353 {
1354 if (BYTES_BIG_ENDIAN)
1355 return "vmrgow %0,%1,%2";
1356 else
1357 return "vmrgew %0,%2,%1";
1358 }
1359 [(set_attr "type" "vecperm")])
1360
1361 (define_insn "p8_vmrgew_<mode>_direct"
1362 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1363 (unspec:VSX_W [(match_operand:VSX_W 1 "register_operand" "v")
1364 (match_operand:VSX_W 2 "register_operand" "v")]
1365 UNSPEC_VMRGEW_DIRECT))]
1366 "TARGET_P8_VECTOR"
1367 "vmrgew %0,%1,%2"
1368 [(set_attr "type" "vecperm")])
1369
1370 (define_insn "p8_vmrgow_<mode>_direct"
1371 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1372 (unspec:VSX_W [(match_operand:VSX_W 1 "register_operand" "v")
1373 (match_operand:VSX_W 2 "register_operand" "v")]
1374 UNSPEC_VMRGOW_DIRECT))]
1375 "TARGET_P8_VECTOR"
1376 "vmrgow %0,%1,%2"
1377 [(set_attr "type" "vecperm")])
1378
1379 (define_expand "vec_widen_umult_even_v16qi"
1380 [(use (match_operand:V8HI 0 "register_operand" ""))
1381 (use (match_operand:V16QI 1 "register_operand" ""))
1382 (use (match_operand:V16QI 2 "register_operand" ""))]
1383 "TARGET_ALTIVEC"
1384 {
1385 if (VECTOR_ELT_ORDER_BIG)
1386 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1387 else
1388 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1389 DONE;
1390 })
1391
1392 (define_expand "vec_widen_smult_even_v16qi"
1393 [(use (match_operand:V8HI 0 "register_operand" ""))
1394 (use (match_operand:V16QI 1 "register_operand" ""))
1395 (use (match_operand:V16QI 2 "register_operand" ""))]
1396 "TARGET_ALTIVEC"
1397 {
1398 if (VECTOR_ELT_ORDER_BIG)
1399 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1400 else
1401 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1402 DONE;
1403 })
1404
1405 (define_expand "vec_widen_umult_even_v8hi"
1406 [(use (match_operand:V4SI 0 "register_operand" ""))
1407 (use (match_operand:V8HI 1 "register_operand" ""))
1408 (use (match_operand:V8HI 2 "register_operand" ""))]
1409 "TARGET_ALTIVEC"
1410 {
1411 if (VECTOR_ELT_ORDER_BIG)
1412 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1413 else
1414 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1415 DONE;
1416 })
1417
1418 (define_expand "vec_widen_smult_even_v8hi"
1419 [(use (match_operand:V4SI 0 "register_operand" ""))
1420 (use (match_operand:V8HI 1 "register_operand" ""))
1421 (use (match_operand:V8HI 2 "register_operand" ""))]
1422 "TARGET_ALTIVEC"
1423 {
1424 if (VECTOR_ELT_ORDER_BIG)
1425 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1426 else
1427 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1428 DONE;
1429 })
1430
1431 (define_expand "vec_widen_umult_even_v4si"
1432 [(use (match_operand:V2DI 0 "register_operand"))
1433 (use (match_operand:V4SI 1 "register_operand"))
1434 (use (match_operand:V4SI 2 "register_operand"))]
1435 "TARGET_P8_VECTOR"
1436 {
1437 if (VECTOR_ELT_ORDER_BIG)
1438 emit_insn (gen_altivec_vmuleuw (operands[0], operands[1], operands[2]));
1439 else
1440 emit_insn (gen_altivec_vmulouw (operands[0], operands[1], operands[2]));
1441 DONE;
1442 })
1443
1444 (define_expand "vec_widen_smult_even_v4si"
1445 [(use (match_operand:V2DI 0 "register_operand"))
1446 (use (match_operand:V4SI 1 "register_operand"))
1447 (use (match_operand:V4SI 2 "register_operand"))]
1448 "TARGET_P8_VECTOR"
1449 {
1450 if (VECTOR_ELT_ORDER_BIG)
1451 emit_insn (gen_altivec_vmulesw (operands[0], operands[1], operands[2]));
1452 else
1453 emit_insn (gen_altivec_vmulosw (operands[0], operands[1], operands[2]));
1454 DONE;
1455 })
1456
1457 (define_expand "vec_widen_umult_odd_v16qi"
1458 [(use (match_operand:V8HI 0 "register_operand" ""))
1459 (use (match_operand:V16QI 1 "register_operand" ""))
1460 (use (match_operand:V16QI 2 "register_operand" ""))]
1461 "TARGET_ALTIVEC"
1462 {
1463 if (VECTOR_ELT_ORDER_BIG)
1464 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1465 else
1466 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1467 DONE;
1468 })
1469
1470 (define_expand "vec_widen_smult_odd_v16qi"
1471 [(use (match_operand:V8HI 0 "register_operand" ""))
1472 (use (match_operand:V16QI 1 "register_operand" ""))
1473 (use (match_operand:V16QI 2 "register_operand" ""))]
1474 "TARGET_ALTIVEC"
1475 {
1476 if (VECTOR_ELT_ORDER_BIG)
1477 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1478 else
1479 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1480 DONE;
1481 })
1482
1483 (define_expand "vec_widen_umult_odd_v8hi"
1484 [(use (match_operand:V4SI 0 "register_operand" ""))
1485 (use (match_operand:V8HI 1 "register_operand" ""))
1486 (use (match_operand:V8HI 2 "register_operand" ""))]
1487 "TARGET_ALTIVEC"
1488 {
1489 if (VECTOR_ELT_ORDER_BIG)
1490 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1491 else
1492 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1493 DONE;
1494 })
1495
1496 (define_expand "vec_widen_smult_odd_v8hi"
1497 [(use (match_operand:V4SI 0 "register_operand" ""))
1498 (use (match_operand:V8HI 1 "register_operand" ""))
1499 (use (match_operand:V8HI 2 "register_operand" ""))]
1500 "TARGET_ALTIVEC"
1501 {
1502 if (VECTOR_ELT_ORDER_BIG)
1503 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1504 else
1505 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1506 DONE;
1507 })
1508
1509 (define_expand "vec_widen_umult_odd_v4si"
1510 [(use (match_operand:V2DI 0 "register_operand"))
1511 (use (match_operand:V4SI 1 "register_operand"))
1512 (use (match_operand:V4SI 2 "register_operand"))]
1513 "TARGET_P8_VECTOR"
1514 {
1515 if (VECTOR_ELT_ORDER_BIG)
1516 emit_insn (gen_altivec_vmulouw (operands[0], operands[1], operands[2]));
1517 else
1518 emit_insn (gen_altivec_vmuleuw (operands[0], operands[1], operands[2]));
1519 DONE;
1520 })
1521
1522 (define_expand "vec_widen_smult_odd_v4si"
1523 [(use (match_operand:V2DI 0 "register_operand"))
1524 (use (match_operand:V4SI 1 "register_operand"))
1525 (use (match_operand:V4SI 2 "register_operand"))]
1526 "TARGET_P8_VECTOR"
1527 {
1528 if (VECTOR_ELT_ORDER_BIG)
1529 emit_insn (gen_altivec_vmulosw (operands[0], operands[1], operands[2]));
1530 else
1531 emit_insn (gen_altivec_vmulesw (operands[0], operands[1], operands[2]));
1532 DONE;
1533 })
974 1534
975 (define_insn "altivec_vmuleub" 1535 (define_insn "altivec_vmuleub"
976 [(set (match_operand:V8HI 0 "register_operand" "=v") 1536 [(set (match_operand:V8HI 0 "register_operand" "=v")
977 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v") 1537 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
978 (match_operand:V16QI 2 "register_operand" "v")] 1538 (match_operand:V16QI 2 "register_operand" "v")]
979 UNSPEC_VMULEUB))] 1539 UNSPEC_VMULEUB))]
980 "TARGET_ALTIVEC" 1540 "TARGET_ALTIVEC"
981 "vmuleub %0,%1,%2" 1541 "vmuleub %0,%1,%2"
982 [(set_attr "type" "veccomplex")]) 1542 [(set_attr "type" "veccomplex")])
983 1543
1544 (define_insn "altivec_vmuloub"
1545 [(set (match_operand:V8HI 0 "register_operand" "=v")
1546 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1547 (match_operand:V16QI 2 "register_operand" "v")]
1548 UNSPEC_VMULOUB))]
1549 "TARGET_ALTIVEC"
1550 "vmuloub %0,%1,%2"
1551 [(set_attr "type" "veccomplex")])
1552
984 (define_insn "altivec_vmulesb" 1553 (define_insn "altivec_vmulesb"
985 [(set (match_operand:V8HI 0 "register_operand" "=v") 1554 [(set (match_operand:V8HI 0 "register_operand" "=v")
986 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v") 1555 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
987 (match_operand:V16QI 2 "register_operand" "v")] 1556 (match_operand:V16QI 2 "register_operand" "v")]
988 UNSPEC_VMULESB))] 1557 UNSPEC_VMULESB))]
989 "TARGET_ALTIVEC" 1558 "TARGET_ALTIVEC"
990 "vmulesb %0,%1,%2" 1559 "vmulesb %0,%1,%2"
991 [(set_attr "type" "veccomplex")]) 1560 [(set_attr "type" "veccomplex")])
992 1561
1562 (define_insn "altivec_vmulosb"
1563 [(set (match_operand:V8HI 0 "register_operand" "=v")
1564 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1565 (match_operand:V16QI 2 "register_operand" "v")]
1566 UNSPEC_VMULOSB))]
1567 "TARGET_ALTIVEC"
1568 "vmulosb %0,%1,%2"
1569 [(set_attr "type" "veccomplex")])
1570
993 (define_insn "altivec_vmuleuh" 1571 (define_insn "altivec_vmuleuh"
994 [(set (match_operand:V4SI 0 "register_operand" "=v") 1572 [(set (match_operand:V4SI 0 "register_operand" "=v")
995 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v") 1573 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
996 (match_operand:V8HI 2 "register_operand" "v")] 1574 (match_operand:V8HI 2 "register_operand" "v")]
997 UNSPEC_VMULEUH))] 1575 UNSPEC_VMULEUH))]
998 "TARGET_ALTIVEC" 1576 "TARGET_ALTIVEC"
999 "vmuleuh %0,%1,%2" 1577 "vmuleuh %0,%1,%2"
1000 [(set_attr "type" "veccomplex")]) 1578 [(set_attr "type" "veccomplex")])
1001 1579
1580 (define_insn "altivec_vmulouh"
1581 [(set (match_operand:V4SI 0 "register_operand" "=v")
1582 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1583 (match_operand:V8HI 2 "register_operand" "v")]
1584 UNSPEC_VMULOUH))]
1585 "TARGET_ALTIVEC"
1586 "vmulouh %0,%1,%2"
1587 [(set_attr "type" "veccomplex")])
1588
1002 (define_insn "altivec_vmulesh" 1589 (define_insn "altivec_vmulesh"
1003 [(set (match_operand:V4SI 0 "register_operand" "=v") 1590 [(set (match_operand:V4SI 0 "register_operand" "=v")
1004 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v") 1591 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1005 (match_operand:V8HI 2 "register_operand" "v")] 1592 (match_operand:V8HI 2 "register_operand" "v")]
1006 UNSPEC_VMULESH))] 1593 UNSPEC_VMULESH))]
1007 "TARGET_ALTIVEC" 1594 "TARGET_ALTIVEC"
1008 "vmulesh %0,%1,%2" 1595 "vmulesh %0,%1,%2"
1009 [(set_attr "type" "veccomplex")]) 1596 [(set_attr "type" "veccomplex")])
1010 1597
1011 (define_insn "altivec_vmuloub"
1012 [(set (match_operand:V8HI 0 "register_operand" "=v")
1013 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1014 (match_operand:V16QI 2 "register_operand" "v")]
1015 UNSPEC_VMULOUB))]
1016 "TARGET_ALTIVEC"
1017 "vmuloub %0,%1,%2"
1018 [(set_attr "type" "veccomplex")])
1019
1020 (define_insn "altivec_vmulosb"
1021 [(set (match_operand:V8HI 0 "register_operand" "=v")
1022 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1023 (match_operand:V16QI 2 "register_operand" "v")]
1024 UNSPEC_VMULOSB))]
1025 "TARGET_ALTIVEC"
1026 "vmulosb %0,%1,%2"
1027 [(set_attr "type" "veccomplex")])
1028
1029 (define_insn "altivec_vmulouh"
1030 [(set (match_operand:V4SI 0 "register_operand" "=v")
1031 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1032 (match_operand:V8HI 2 "register_operand" "v")]
1033 UNSPEC_VMULOUH))]
1034 "TARGET_ALTIVEC"
1035 "vmulouh %0,%1,%2"
1036 [(set_attr "type" "veccomplex")])
1037
1038 (define_insn "altivec_vmulosh" 1598 (define_insn "altivec_vmulosh"
1039 [(set (match_operand:V4SI 0 "register_operand" "=v") 1599 [(set (match_operand:V4SI 0 "register_operand" "=v")
1040 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v") 1600 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1041 (match_operand:V8HI 2 "register_operand" "v")] 1601 (match_operand:V8HI 2 "register_operand" "v")]
1042 UNSPEC_VMULOSH))] 1602 UNSPEC_VMULOSH))]
1043 "TARGET_ALTIVEC" 1603 "TARGET_ALTIVEC"
1044 "vmulosh %0,%1,%2" 1604 "vmulosh %0,%1,%2"
1045 [(set_attr "type" "veccomplex")]) 1605 [(set_attr "type" "veccomplex")])
1046 1606
1047 1607 (define_insn "altivec_vmuleuw"
1048 ;; logical ops. Have the logical ops follow the memory ops in 1608 [(set (match_operand:V2DI 0 "register_operand" "=v")
1049 ;; terms of whether to prefer VSX or Altivec 1609 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1050 1610 (match_operand:V4SI 2 "register_operand" "v")]
1051 (define_insn "*altivec_and<mode>3" 1611 UNSPEC_VMULEUW))]
1052 [(set (match_operand:VM 0 "register_operand" "=v") 1612 "TARGET_P8_VECTOR"
1053 (and:VM (match_operand:VM 1 "register_operand" "v") 1613 "vmuleuw %0,%1,%2"
1054 (match_operand:VM 2 "register_operand" "v")))] 1614 [(set_attr "type" "veccomplex")])
1055 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)" 1615
1056 "vand %0,%1,%2" 1616 (define_insn "altivec_vmulouw"
1057 [(set_attr "type" "vecsimple")]) 1617 [(set (match_operand:V2DI 0 "register_operand" "=v")
1058 1618 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1059 (define_insn "*altivec_ior<mode>3" 1619 (match_operand:V4SI 2 "register_operand" "v")]
1060 [(set (match_operand:VM 0 "register_operand" "=v") 1620 UNSPEC_VMULOUW))]
1061 (ior:VM (match_operand:VM 1 "register_operand" "v") 1621 "TARGET_P8_VECTOR"
1062 (match_operand:VM 2 "register_operand" "v")))] 1622 "vmulouw %0,%1,%2"
1063 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)" 1623 [(set_attr "type" "veccomplex")])
1064 "vor %0,%1,%2" 1624
1065 [(set_attr "type" "vecsimple")]) 1625 (define_insn "altivec_vmulesw"
1066 1626 [(set (match_operand:V2DI 0 "register_operand" "=v")
1067 (define_insn "*altivec_xor<mode>3" 1627 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1068 [(set (match_operand:VM 0 "register_operand" "=v") 1628 (match_operand:V4SI 2 "register_operand" "v")]
1069 (xor:VM (match_operand:VM 1 "register_operand" "v") 1629 UNSPEC_VMULESW))]
1070 (match_operand:VM 2 "register_operand" "v")))] 1630 "TARGET_P8_VECTOR"
1071 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)" 1631 "vmulesw %0,%1,%2"
1072 "vxor %0,%1,%2" 1632 [(set_attr "type" "veccomplex")])
1073 [(set_attr "type" "vecsimple")]) 1633
1074 1634 (define_insn "altivec_vmulosw"
1075 (define_insn "*altivec_one_cmpl<mode>2" 1635 [(set (match_operand:V2DI 0 "register_operand" "=v")
1076 [(set (match_operand:VM 0 "register_operand" "=v") 1636 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1077 (not:VM (match_operand:VM 1 "register_operand" "v")))] 1637 (match_operand:V4SI 2 "register_operand" "v")]
1078 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)" 1638 UNSPEC_VMULOSW))]
1079 "vnor %0,%1,%1" 1639 "TARGET_P8_VECTOR"
1080 [(set_attr "type" "vecsimple")]) 1640 "vmulosw %0,%1,%2"
1081 1641 [(set_attr "type" "veccomplex")])
1082 (define_insn "*altivec_nor<mode>3" 1642
1083 [(set (match_operand:VM 0 "register_operand" "=v") 1643 ;; Vector pack/unpack
1084 (not:VM (ior:VM (match_operand:VM 1 "register_operand" "v")
1085 (match_operand:VM 2 "register_operand" "v"))))]
1086 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
1087 "vnor %0,%1,%2"
1088 [(set_attr "type" "vecsimple")])
1089
1090 (define_insn "*altivec_andc<mode>3"
1091 [(set (match_operand:VM 0 "register_operand" "=v")
1092 (and:VM (not:VM (match_operand:VM 2 "register_operand" "v"))
1093 (match_operand:VM 1 "register_operand" "v")))]
1094 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
1095 "vandc %0,%1,%2"
1096 [(set_attr "type" "vecsimple")])
1097
1098 (define_insn "altivec_vpkuhum"
1099 [(set (match_operand:V16QI 0 "register_operand" "=v")
1100 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "v")
1101 (match_operand:V8HI 2 "register_operand" "v")]
1102 UNSPEC_VPKUHUM))]
1103 "TARGET_ALTIVEC"
1104 "vpkuhum %0,%1,%2"
1105 [(set_attr "type" "vecperm")])
1106
1107 (define_insn "altivec_vpkuwum"
1108 [(set (match_operand:V8HI 0 "register_operand" "=v")
1109 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
1110 (match_operand:V4SI 2 "register_operand" "v")]
1111 UNSPEC_VPKUWUM))]
1112 "TARGET_ALTIVEC"
1113 "vpkuwum %0,%1,%2"
1114 [(set_attr "type" "vecperm")])
1115
1116 (define_insn "altivec_vpkpx" 1644 (define_insn "altivec_vpkpx"
1117 [(set (match_operand:V8HI 0 "register_operand" "=v") 1645 [(set (match_operand:V8HI 0 "register_operand" "=v")
1118 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v") 1646 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
1119 (match_operand:V4SI 2 "register_operand" "v")] 1647 (match_operand:V4SI 2 "register_operand" "v")]
1120 UNSPEC_VPKPX))] 1648 UNSPEC_VPKPX))]
1121 "TARGET_ALTIVEC" 1649 "TARGET_ALTIVEC"
1122 "vpkpx %0,%1,%2" 1650 "*
1651 {
1652 if (VECTOR_ELT_ORDER_BIG)
1653 return \"vpkpx %0,%1,%2\";
1654 else
1655 return \"vpkpx %0,%2,%1\";
1656 }"
1123 [(set_attr "type" "vecperm")]) 1657 [(set_attr "type" "vecperm")])
1124 1658
1125 (define_insn "altivec_vpkshss" 1659 (define_insn "altivec_vpks<VI_char>ss"
1126 [(set (match_operand:V16QI 0 "register_operand" "=v") 1660 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1127 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "v") 1661 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1128 (match_operand:V8HI 2 "register_operand" "v")] 1662 (match_operand:VP 2 "register_operand" "v")]
1129 UNSPEC_VPKSHSS)) 1663 UNSPEC_VPACK_SIGN_SIGN_SAT))]
1130 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 1664 "<VI_unit>"
1131 "TARGET_ALTIVEC" 1665 "*
1132 "vpkshss %0,%1,%2" 1666 {
1667 if (VECTOR_ELT_ORDER_BIG)
1668 return \"vpks<VI_char>ss %0,%1,%2\";
1669 else
1670 return \"vpks<VI_char>ss %0,%2,%1\";
1671 }"
1133 [(set_attr "type" "vecperm")]) 1672 [(set_attr "type" "vecperm")])
1134 1673
1135 (define_insn "altivec_vpkswss" 1674 (define_insn "altivec_vpks<VI_char>us"
1136 [(set (match_operand:V8HI 0 "register_operand" "=v") 1675 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1137 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v") 1676 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1138 (match_operand:V4SI 2 "register_operand" "v")] 1677 (match_operand:VP 2 "register_operand" "v")]
1139 UNSPEC_VPKSWSS)) 1678 UNSPEC_VPACK_SIGN_UNS_SAT))]
1140 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 1679 "<VI_unit>"
1141 "TARGET_ALTIVEC" 1680 "*
1142 "vpkswss %0,%1,%2" 1681 {
1682 if (VECTOR_ELT_ORDER_BIG)
1683 return \"vpks<VI_char>us %0,%1,%2\";
1684 else
1685 return \"vpks<VI_char>us %0,%2,%1\";
1686 }"
1143 [(set_attr "type" "vecperm")]) 1687 [(set_attr "type" "vecperm")])
1144 1688
1145 (define_insn "altivec_vpkuhus" 1689 (define_insn "altivec_vpku<VI_char>us"
1146 [(set (match_operand:V16QI 0 "register_operand" "=v") 1690 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1147 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "v") 1691 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1148 (match_operand:V8HI 2 "register_operand" "v")] 1692 (match_operand:VP 2 "register_operand" "v")]
1149 UNSPEC_VPKUHUS)) 1693 UNSPEC_VPACK_UNS_UNS_SAT))]
1150 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 1694 "<VI_unit>"
1151 "TARGET_ALTIVEC" 1695 "*
1152 "vpkuhus %0,%1,%2" 1696 {
1697 if (VECTOR_ELT_ORDER_BIG)
1698 return \"vpku<VI_char>us %0,%1,%2\";
1699 else
1700 return \"vpku<VI_char>us %0,%2,%1\";
1701 }"
1153 [(set_attr "type" "vecperm")]) 1702 [(set_attr "type" "vecperm")])
1154 1703
1155 (define_insn "altivec_vpkshus" 1704 (define_insn "altivec_vpku<VI_char>um"
1156 [(set (match_operand:V16QI 0 "register_operand" "=v") 1705 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1157 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "v") 1706 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1158 (match_operand:V8HI 2 "register_operand" "v")] 1707 (match_operand:VP 2 "register_operand" "v")]
1159 UNSPEC_VPKSHUS)) 1708 UNSPEC_VPACK_UNS_UNS_MOD))]
1160 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 1709 "<VI_unit>"
1161 "TARGET_ALTIVEC" 1710 "*
1162 "vpkshus %0,%1,%2" 1711 {
1712 if (VECTOR_ELT_ORDER_BIG)
1713 return \"vpku<VI_char>um %0,%1,%2\";
1714 else
1715 return \"vpku<VI_char>um %0,%2,%1\";
1716 }"
1163 [(set_attr "type" "vecperm")]) 1717 [(set_attr "type" "vecperm")])
1164 1718
1165 (define_insn "altivec_vpkuwus" 1719 (define_insn "altivec_vpku<VI_char>um_direct"
1166 [(set (match_operand:V8HI 0 "register_operand" "=v") 1720 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1167 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v") 1721 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1168 (match_operand:V4SI 2 "register_operand" "v")] 1722 (match_operand:VP 2 "register_operand" "v")]
1169 UNSPEC_VPKUWUS)) 1723 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT))]
1170 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 1724 "<VI_unit>"
1171 "TARGET_ALTIVEC" 1725 "*
1172 "vpkuwus %0,%1,%2" 1726 {
1727 if (BYTES_BIG_ENDIAN)
1728 return \"vpku<VI_char>um %0,%1,%2\";
1729 else
1730 return \"vpku<VI_char>um %0,%2,%1\";
1731 }"
1173 [(set_attr "type" "vecperm")]) 1732 [(set_attr "type" "vecperm")])
1174 1733
1175 (define_insn "altivec_vpkswus"
1176 [(set (match_operand:V8HI 0 "register_operand" "=v")
1177 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
1178 (match_operand:V4SI 2 "register_operand" "v")]
1179 UNSPEC_VPKSWUS))
1180 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1181 "TARGET_ALTIVEC"
1182 "vpkswus %0,%1,%2"
1183 [(set_attr "type" "vecperm")])
1184
1185 (define_insn "*altivec_vrl<VI_char>" 1734 (define_insn "*altivec_vrl<VI_char>"
1186 [(set (match_operand:VI 0 "register_operand" "=v") 1735 [(set (match_operand:VI2 0 "register_operand" "=v")
1187 (rotate:VI (match_operand:VI 1 "register_operand" "v") 1736 (rotate:VI2 (match_operand:VI2 1 "register_operand" "v")
1188 (match_operand:VI 2 "register_operand" "v")))] 1737 (match_operand:VI2 2 "register_operand" "v")))]
1189 "TARGET_ALTIVEC" 1738 "<VI_unit>"
1190 "vrl<VI_char> %0,%1,%2" 1739 "vrl<VI_char> %0,%1,%2"
1191 [(set_attr "type" "vecsimple")]) 1740 [(set_attr "type" "vecsimple")])
1741
1742 (define_insn "altivec_vrl<VI_char>mi"
1743 [(set (match_operand:VIlong 0 "register_operand" "=v")
1744 (unspec:VIlong [(match_operand:VIlong 1 "register_operand" "0")
1745 (match_operand:VIlong 2 "register_operand" "v")
1746 (match_operand:VIlong 3 "register_operand" "v")]
1747 UNSPEC_VRLMI))]
1748 "TARGET_P9_VECTOR"
1749 "vrl<VI_char>mi %0,%2,%3"
1750 [(set_attr "type" "veclogical")])
1751
1752 (define_insn "altivec_vrl<VI_char>nm"
1753 [(set (match_operand:VIlong 0 "register_operand" "=v")
1754 (unspec:VIlong [(match_operand:VIlong 1 "register_operand" "v")
1755 (match_operand:VIlong 2 "register_operand" "v")]
1756 UNSPEC_VRLNM))]
1757 "TARGET_P9_VECTOR"
1758 "vrl<VI_char>nm %0,%1,%2"
1759 [(set_attr "type" "veclogical")])
1192 1760
1193 (define_insn "altivec_vsl" 1761 (define_insn "altivec_vsl"
1194 [(set (match_operand:V4SI 0 "register_operand" "=v") 1762 [(set (match_operand:V4SI 0 "register_operand" "=v")
1195 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") 1763 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1196 (match_operand:V4SI 2 "register_operand" "v")] 1764 (match_operand:V4SI 2 "register_operand" "v")]
1206 UNSPEC_VSLO))] 1774 UNSPEC_VSLO))]
1207 "TARGET_ALTIVEC" 1775 "TARGET_ALTIVEC"
1208 "vslo %0,%1,%2" 1776 "vslo %0,%1,%2"
1209 [(set_attr "type" "vecperm")]) 1777 [(set_attr "type" "vecperm")])
1210 1778
1779 (define_insn "vslv"
1780 [(set (match_operand:V16QI 0 "register_operand" "=v")
1781 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1782 (match_operand:V16QI 2 "register_operand" "v")]
1783 UNSPEC_VSLV))]
1784 "TARGET_P9_VECTOR"
1785 "vslv %0,%1,%2"
1786 [(set_attr "type" "vecsimple")])
1787
1788 (define_insn "vsrv"
1789 [(set (match_operand:V16QI 0 "register_operand" "=v")
1790 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1791 (match_operand:V16QI 2 "register_operand" "v")]
1792 UNSPEC_VSRV))]
1793 "TARGET_P9_VECTOR"
1794 "vsrv %0,%1,%2"
1795 [(set_attr "type" "vecsimple")])
1796
1211 (define_insn "*altivec_vsl<VI_char>" 1797 (define_insn "*altivec_vsl<VI_char>"
1212 [(set (match_operand:VI 0 "register_operand" "=v") 1798 [(set (match_operand:VI2 0 "register_operand" "=v")
1213 (ashift:VI (match_operand:VI 1 "register_operand" "v") 1799 (ashift:VI2 (match_operand:VI2 1 "register_operand" "v")
1214 (match_operand:VI 2 "register_operand" "v")))] 1800 (match_operand:VI2 2 "register_operand" "v")))]
1215 "TARGET_ALTIVEC" 1801 "<VI_unit>"
1216 "vsl<VI_char> %0,%1,%2" 1802 "vsl<VI_char> %0,%1,%2"
1217 [(set_attr "type" "vecsimple")]) 1803 [(set_attr "type" "vecsimple")])
1218 1804
1219 (define_insn "*altivec_vsr<VI_char>" 1805 (define_insn "*altivec_vsr<VI_char>"
1220 [(set (match_operand:VI 0 "register_operand" "=v") 1806 [(set (match_operand:VI2 0 "register_operand" "=v")
1221 (lshiftrt:VI (match_operand:VI 1 "register_operand" "v") 1807 (lshiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1222 (match_operand:VI 2 "register_operand" "v")))] 1808 (match_operand:VI2 2 "register_operand" "v")))]
1223 "TARGET_ALTIVEC" 1809 "<VI_unit>"
1224 "vsr<VI_char> %0,%1,%2" 1810 "vsr<VI_char> %0,%1,%2"
1225 [(set_attr "type" "vecsimple")]) 1811 [(set_attr "type" "vecsimple")])
1226 1812
1227 (define_insn "*altivec_vsra<VI_char>" 1813 (define_insn "*altivec_vsra<VI_char>"
1228 [(set (match_operand:VI 0 "register_operand" "=v") 1814 [(set (match_operand:VI2 0 "register_operand" "=v")
1229 (ashiftrt:VI (match_operand:VI 1 "register_operand" "v") 1815 (ashiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
1230 (match_operand:VI 2 "register_operand" "v")))] 1816 (match_operand:VI2 2 "register_operand" "v")))]
1231 "TARGET_ALTIVEC" 1817 "<VI_unit>"
1232 "vsra<VI_char> %0,%1,%2" 1818 "vsra<VI_char> %0,%1,%2"
1233 [(set_attr "type" "vecsimple")]) 1819 [(set_attr "type" "vecsimple")])
1234 1820
1235 (define_insn "altivec_vsr" 1821 (define_insn "altivec_vsr"
1236 [(set (match_operand:V4SI 0 "register_operand" "=v") 1822 [(set (match_operand:V4SI 0 "register_operand" "=v")
1253 (define_insn "altivec_vsum4ubs" 1839 (define_insn "altivec_vsum4ubs"
1254 [(set (match_operand:V4SI 0 "register_operand" "=v") 1840 [(set (match_operand:V4SI 0 "register_operand" "=v")
1255 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v") 1841 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")
1256 (match_operand:V4SI 2 "register_operand" "v")] 1842 (match_operand:V4SI 2 "register_operand" "v")]
1257 UNSPEC_VSUM4UBS)) 1843 UNSPEC_VSUM4UBS))
1258 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 1844 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1259 "TARGET_ALTIVEC" 1845 "TARGET_ALTIVEC"
1260 "vsum4ubs %0,%1,%2" 1846 "vsum4ubs %0,%1,%2"
1261 [(set_attr "type" "veccomplex")]) 1847 [(set_attr "type" "veccomplex")])
1262 1848
1263 (define_insn "altivec_vsum4s<VI_char>s" 1849 (define_insn "altivec_vsum4s<VI_char>s"
1264 [(set (match_operand:V4SI 0 "register_operand" "=v") 1850 [(set (match_operand:V4SI 0 "register_operand" "=v")
1265 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v") 1851 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1266 (match_operand:V4SI 2 "register_operand" "v")] 1852 (match_operand:V4SI 2 "register_operand" "v")]
1267 UNSPEC_VSUM4S)) 1853 UNSPEC_VSUM4S))
1268 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 1854 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1269 "TARGET_ALTIVEC" 1855 "TARGET_ALTIVEC"
1270 "vsum4s<VI_char>s %0,%1,%2" 1856 "vsum4s<VI_char>s %0,%1,%2"
1271 [(set_attr "type" "veccomplex")]) 1857 [(set_attr "type" "veccomplex")])
1272 1858
1273 (define_insn "altivec_vsum2sws" 1859 (define_expand "altivec_vsum2sws"
1860 [(use (match_operand:V4SI 0 "register_operand"))
1861 (use (match_operand:V4SI 1 "register_operand"))
1862 (use (match_operand:V4SI 2 "register_operand"))]
1863 "TARGET_ALTIVEC"
1864 {
1865 if (VECTOR_ELT_ORDER_BIG)
1866 emit_insn (gen_altivec_vsum2sws_direct (operands[0], operands[1],
1867 operands[2]));
1868 else
1869 {
1870 rtx tmp1 = gen_reg_rtx (V4SImode);
1871 rtx tmp2 = gen_reg_rtx (V4SImode);
1872 emit_insn (gen_altivec_vsldoi_v4si (tmp1, operands[2],
1873 operands[2], GEN_INT (12)));
1874 emit_insn (gen_altivec_vsum2sws_direct (tmp2, operands[1], tmp1));
1875 emit_insn (gen_altivec_vsldoi_v4si (operands[0], tmp2, tmp2,
1876 GEN_INT (4)));
1877 }
1878 DONE;
1879 })
1880
1881 ; FIXME: This can probably be expressed without an UNSPEC.
1882 (define_insn "altivec_vsum2sws_direct"
1883 [(set (match_operand:V4SI 0 "register_operand" "=v")
1884 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1885 (match_operand:V4SI 2 "register_operand" "v")]
1886 UNSPEC_VSUM2SWS))
1887 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1888 "TARGET_ALTIVEC"
1889 "vsum2sws %0,%1,%2"
1890 [(set_attr "type" "veccomplex")])
1891
1892 (define_expand "altivec_vsumsws"
1893 [(use (match_operand:V4SI 0 "register_operand"))
1894 (use (match_operand:V4SI 1 "register_operand"))
1895 (use (match_operand:V4SI 2 "register_operand"))]
1896 "TARGET_ALTIVEC"
1897 {
1898 if (VECTOR_ELT_ORDER_BIG)
1899 emit_insn (gen_altivec_vsumsws_direct (operands[0], operands[1],
1900 operands[2]));
1901 else
1902 {
1903 rtx tmp1 = gen_reg_rtx (V4SImode);
1904 rtx tmp2 = gen_reg_rtx (V4SImode);
1905 emit_insn (gen_altivec_vspltw_direct (tmp1, operands[2], const0_rtx));
1906 emit_insn (gen_altivec_vsumsws_direct (tmp2, operands[1], tmp1));
1907 emit_insn (gen_altivec_vsldoi_v4si (operands[0], tmp2, tmp2,
1908 GEN_INT (12)));
1909 }
1910 DONE;
1911 })
1912
1913 ; FIXME: This can probably be expressed without an UNSPEC.
1914 (define_insn "altivec_vsumsws_direct"
1274 [(set (match_operand:V4SI 0 "register_operand" "=v") 1915 [(set (match_operand:V4SI 0 "register_operand" "=v")
1275 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") 1916 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1276 (match_operand:V4SI 2 "register_operand" "v")] 1917 (match_operand:V4SI 2 "register_operand" "v")]
1277 UNSPEC_VSUM2SWS)) 1918 UNSPEC_VSUMSWS_DIRECT))
1278 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 1919 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1279 "TARGET_ALTIVEC"
1280 "vsum2sws %0,%1,%2"
1281 [(set_attr "type" "veccomplex")])
1282
1283 (define_insn "altivec_vsumsws"
1284 [(set (match_operand:V4SI 0 "register_operand" "=v")
1285 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1286 (match_operand:V4SI 2 "register_operand" "v")]
1287 UNSPEC_VSUMSWS))
1288 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1289 "TARGET_ALTIVEC" 1920 "TARGET_ALTIVEC"
1290 "vsumsws %0,%1,%2" 1921 "vsumsws %0,%1,%2"
1291 [(set_attr "type" "veccomplex")]) 1922 [(set_attr "type" "veccomplex")])
1292 1923
1293 (define_insn "altivec_vspltb" 1924 (define_expand "altivec_vspltb"
1925 [(use (match_operand:V16QI 0 "register_operand" ""))
1926 (use (match_operand:V16QI 1 "register_operand" ""))
1927 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1928 "TARGET_ALTIVEC"
1929 {
1930 rtvec v;
1931 rtx x;
1932
1933 /* Special handling for LE with -maltivec=be. We have to reflect
1934 the actual selected index for the splat in the RTL. */
1935 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1936 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
1937
1938 v = gen_rtvec (1, operands[2]);
1939 x = gen_rtx_VEC_SELECT (QImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1940 x = gen_rtx_VEC_DUPLICATE (V16QImode, x);
1941 emit_insn (gen_rtx_SET (operands[0], x));
1942 DONE;
1943 })
1944
1945 (define_insn "*altivec_vspltb_internal"
1294 [(set (match_operand:V16QI 0 "register_operand" "=v") 1946 [(set (match_operand:V16QI 0 "register_operand" "=v")
1295 (vec_duplicate:V16QI 1947 (vec_duplicate:V16QI
1296 (vec_select:QI (match_operand:V16QI 1 "register_operand" "v") 1948 (vec_select:QI (match_operand:V16QI 1 "register_operand" "v")
1297 (parallel 1949 (parallel
1298 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))] 1950 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
1299 "TARGET_ALTIVEC" 1951 "TARGET_ALTIVEC"
1952 {
1953 /* For true LE, this adjusts the selected index. For LE with
1954 -maltivec=be, this reverses what was done in the define_expand
1955 because the instruction already has big-endian bias. */
1956 if (!BYTES_BIG_ENDIAN)
1957 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
1958
1959 return "vspltb %0,%1,%2";
1960 }
1961 [(set_attr "type" "vecperm")])
1962
1963 (define_insn "altivec_vspltb_direct"
1964 [(set (match_operand:V16QI 0 "register_operand" "=v")
1965 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1966 (match_operand:QI 2 "u5bit_cint_operand" "i")]
1967 UNSPEC_VSPLT_DIRECT))]
1968 "TARGET_ALTIVEC"
1300 "vspltb %0,%1,%2" 1969 "vspltb %0,%1,%2"
1301 [(set_attr "type" "vecperm")]) 1970 [(set_attr "type" "vecperm")])
1302 1971
1303 (define_insn "altivec_vsplth" 1972 (define_expand "altivec_vsplth"
1973 [(use (match_operand:V8HI 0 "register_operand" ""))
1974 (use (match_operand:V8HI 1 "register_operand" ""))
1975 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
1976 "TARGET_ALTIVEC"
1977 {
1978 rtvec v;
1979 rtx x;
1980
1981 /* Special handling for LE with -maltivec=be. We have to reflect
1982 the actual selected index for the splat in the RTL. */
1983 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
1984 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
1985
1986 v = gen_rtvec (1, operands[2]);
1987 x = gen_rtx_VEC_SELECT (HImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
1988 x = gen_rtx_VEC_DUPLICATE (V8HImode, x);
1989 emit_insn (gen_rtx_SET (operands[0], x));
1990 DONE;
1991 })
1992
1993 (define_insn "*altivec_vsplth_internal"
1304 [(set (match_operand:V8HI 0 "register_operand" "=v") 1994 [(set (match_operand:V8HI 0 "register_operand" "=v")
1305 (vec_duplicate:V8HI 1995 (vec_duplicate:V8HI
1306 (vec_select:HI (match_operand:V8HI 1 "register_operand" "v") 1996 (vec_select:HI (match_operand:V8HI 1 "register_operand" "v")
1307 (parallel 1997 (parallel
1308 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))] 1998 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
1309 "TARGET_ALTIVEC" 1999 "TARGET_ALTIVEC"
2000 {
2001 /* For true LE, this adjusts the selected index. For LE with
2002 -maltivec=be, this reverses what was done in the define_expand
2003 because the instruction already has big-endian bias. */
2004 if (!BYTES_BIG_ENDIAN)
2005 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
2006
2007 return "vsplth %0,%1,%2";
2008 }
2009 [(set_attr "type" "vecperm")])
2010
2011 (define_insn "altivec_vsplth_direct"
2012 [(set (match_operand:V8HI 0 "register_operand" "=v")
2013 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
2014 (match_operand:QI 2 "u5bit_cint_operand" "i")]
2015 UNSPEC_VSPLT_DIRECT))]
2016 "TARGET_ALTIVEC"
1310 "vsplth %0,%1,%2" 2017 "vsplth %0,%1,%2"
1311 [(set_attr "type" "vecperm")]) 2018 [(set_attr "type" "vecperm")])
1312 2019
1313 (define_insn "altivec_vspltw" 2020 (define_expand "altivec_vspltw"
2021 [(use (match_operand:V4SI 0 "register_operand" ""))
2022 (use (match_operand:V4SI 1 "register_operand" ""))
2023 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
2024 "TARGET_ALTIVEC"
2025 {
2026 rtvec v;
2027 rtx x;
2028
2029 /* Special handling for LE with -maltivec=be. We have to reflect
2030 the actual selected index for the splat in the RTL. */
2031 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2032 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2033
2034 v = gen_rtvec (1, operands[2]);
2035 x = gen_rtx_VEC_SELECT (SImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2036 x = gen_rtx_VEC_DUPLICATE (V4SImode, x);
2037 emit_insn (gen_rtx_SET (operands[0], x));
2038 DONE;
2039 })
2040
2041 (define_insn "*altivec_vspltw_internal"
1314 [(set (match_operand:V4SI 0 "register_operand" "=v") 2042 [(set (match_operand:V4SI 0 "register_operand" "=v")
1315 (vec_duplicate:V4SI 2043 (vec_duplicate:V4SI
1316 (vec_select:SI (match_operand:V4SI 1 "register_operand" "v") 2044 (vec_select:SI (match_operand:V4SI 1 "register_operand" "v")
1317 (parallel 2045 (parallel
1318 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))] 2046 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
1319 "TARGET_ALTIVEC" 2047 "TARGET_ALTIVEC"
2048 {
2049 /* For true LE, this adjusts the selected index. For LE with
2050 -maltivec=be, this reverses what was done in the define_expand
2051 because the instruction already has big-endian bias. */
2052 if (!BYTES_BIG_ENDIAN)
2053 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2054
2055 return "vspltw %0,%1,%2";
2056 }
2057 [(set_attr "type" "vecperm")])
2058
2059 (define_insn "altivec_vspltw_direct"
2060 [(set (match_operand:V4SI 0 "register_operand" "=v")
2061 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2062 (match_operand:QI 2 "u5bit_cint_operand" "i")]
2063 UNSPEC_VSPLT_DIRECT))]
2064 "TARGET_ALTIVEC"
1320 "vspltw %0,%1,%2" 2065 "vspltw %0,%1,%2"
1321 [(set_attr "type" "vecperm")]) 2066 [(set_attr "type" "vecperm")])
1322 2067
1323 (define_insn "altivec_vspltsf" 2068 (define_expand "altivec_vspltsf"
2069 [(use (match_operand:V4SF 0 "register_operand" ""))
2070 (use (match_operand:V4SF 1 "register_operand" ""))
2071 (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
2072 "TARGET_ALTIVEC"
2073 {
2074 rtvec v;
2075 rtx x;
2076
2077 /* Special handling for LE with -maltivec=be. We have to reflect
2078 the actual selected index for the splat in the RTL. */
2079 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2080 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2081
2082 v = gen_rtvec (1, operands[2]);
2083 x = gen_rtx_VEC_SELECT (SFmode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2084 x = gen_rtx_VEC_DUPLICATE (V4SFmode, x);
2085 emit_insn (gen_rtx_SET (operands[0], x));
2086 DONE;
2087 })
2088
2089 (define_insn "*altivec_vspltsf_internal"
1324 [(set (match_operand:V4SF 0 "register_operand" "=v") 2090 [(set (match_operand:V4SF 0 "register_operand" "=v")
1325 (vec_duplicate:V4SF 2091 (vec_duplicate:V4SF
1326 (vec_select:SF (match_operand:V4SF 1 "register_operand" "v") 2092 (vec_select:SF (match_operand:V4SF 1 "register_operand" "v")
1327 (parallel 2093 (parallel
1328 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))] 2094 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
1329 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)" 2095 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1330 "vspltw %0,%1,%2" 2096 {
2097 /* For true LE, this adjusts the selected index. For LE with
2098 -maltivec=be, this reverses what was done in the define_expand
2099 because the instruction already has big-endian bias. */
2100 if (!BYTES_BIG_ENDIAN)
2101 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2102
2103 return "vspltw %0,%1,%2";
2104 }
1331 [(set_attr "type" "vecperm")]) 2105 [(set_attr "type" "vecperm")])
1332 2106
1333 (define_insn "altivec_vspltis<VI_char>" 2107 (define_insn "altivec_vspltis<VI_char>"
1334 [(set (match_operand:VI 0 "register_operand" "=v") 2108 [(set (match_operand:VI 0 "register_operand" "=v")
1335 (vec_duplicate:VI 2109 (vec_duplicate:VI
1338 "vspltis<VI_char> %0,%1" 2112 "vspltis<VI_char> %0,%1"
1339 [(set_attr "type" "vecperm")]) 2113 [(set_attr "type" "vecperm")])
1340 2114
1341 (define_insn "*altivec_vrfiz" 2115 (define_insn "*altivec_vrfiz"
1342 [(set (match_operand:V4SF 0 "register_operand" "=v") 2116 [(set (match_operand:V4SF 0 "register_operand" "=v")
1343 (fix:V4SF (match_operand:V4SF 1 "register_operand" "v")))] 2117 (fix:V4SF (match_operand:V4SF 1 "register_operand" "v")))]
1344 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)" 2118 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1345 "vrfiz %0,%1" 2119 "vrfiz %0,%1"
1346 [(set_attr "type" "vecfloat")]) 2120 [(set_attr "type" "vecfloat")])
1347 2121
1348 (define_insn "altivec_vperm_<mode>" 2122 (define_expand "altivec_vperm_<mode>"
1349 [(set (match_operand:VM 0 "register_operand" "=v") 2123 [(set (match_operand:VM 0 "register_operand" "")
1350 (unspec:VM [(match_operand:VM 1 "register_operand" "v") 2124 (unspec:VM [(match_operand:VM 1 "register_operand" "")
1351 (match_operand:VM 2 "register_operand" "v") 2125 (match_operand:VM 2 "register_operand" "")
1352 (match_operand:V16QI 3 "register_operand" "v")] 2126 (match_operand:V16QI 3 "register_operand" "")]
1353 UNSPEC_VPERM))] 2127 UNSPEC_VPERM))]
1354 "TARGET_ALTIVEC" 2128 "TARGET_ALTIVEC"
1355 "vperm %0,%1,%2,%3" 2129 {
1356 [(set_attr "type" "vecperm")]) 2130 if (!VECTOR_ELT_ORDER_BIG)
1357 2131 {
1358 (define_insn "altivec_vperm_<mode>_uns" 2132 altivec_expand_vec_perm_le (operands);
1359 [(set (match_operand:VM 0 "register_operand" "=v") 2133 DONE;
1360 (unspec:VM [(match_operand:VM 1 "register_operand" "v") 2134 }
1361 (match_operand:VM 2 "register_operand" "v") 2135 })
1362 (match_operand:V16QI 3 "register_operand" "v")] 2136
2137 ;; Slightly prefer vperm, since the target does not overlap the source
2138 (define_insn "*altivec_vperm_<mode>_internal"
2139 [(set (match_operand:VM 0 "register_operand" "=v,?wo")
2140 (unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
2141 (match_operand:VM 2 "register_operand" "v,0")
2142 (match_operand:V16QI 3 "register_operand" "v,wo")]
2143 UNSPEC_VPERM))]
2144 "TARGET_ALTIVEC"
2145 "@
2146 vperm %0,%1,%2,%3
2147 xxperm %x0,%x1,%x3"
2148 [(set_attr "type" "vecperm")
2149 (set_attr "length" "4")])
2150
2151 (define_insn "altivec_vperm_v8hiv16qi"
2152 [(set (match_operand:V16QI 0 "register_operand" "=v,?wo")
2153 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "v,wo")
2154 (match_operand:V8HI 2 "register_operand" "v,0")
2155 (match_operand:V16QI 3 "register_operand" "v,wo")]
2156 UNSPEC_VPERM))]
2157 "TARGET_ALTIVEC"
2158 "@
2159 vperm %0,%1,%2,%3
2160 xxperm %x0,%x1,%x3"
2161 [(set_attr "type" "vecperm")
2162 (set_attr "length" "4")])
2163
2164 (define_expand "altivec_vperm_<mode>_uns"
2165 [(set (match_operand:VM 0 "register_operand" "")
2166 (unspec:VM [(match_operand:VM 1 "register_operand" "")
2167 (match_operand:VM 2 "register_operand" "")
2168 (match_operand:V16QI 3 "register_operand" "")]
1363 UNSPEC_VPERM_UNS))] 2169 UNSPEC_VPERM_UNS))]
1364 "TARGET_ALTIVEC" 2170 "TARGET_ALTIVEC"
1365 "vperm %0,%1,%2,%3" 2171 {
1366 [(set_attr "type" "vecperm")]) 2172 if (!VECTOR_ELT_ORDER_BIG)
2173 {
2174 altivec_expand_vec_perm_le (operands);
2175 DONE;
2176 }
2177 })
2178
2179 (define_insn "*altivec_vperm_<mode>_uns_internal"
2180 [(set (match_operand:VM 0 "register_operand" "=v,?wo")
2181 (unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
2182 (match_operand:VM 2 "register_operand" "v,0")
2183 (match_operand:V16QI 3 "register_operand" "v,wo")]
2184 UNSPEC_VPERM_UNS))]
2185 "TARGET_ALTIVEC"
2186 "@
2187 vperm %0,%1,%2,%3
2188 xxperm %x0,%x1,%x3"
2189 [(set_attr "type" "vecperm")
2190 (set_attr "length" "4")])
2191
2192 (define_expand "vec_permv16qi"
2193 [(set (match_operand:V16QI 0 "register_operand" "")
2194 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "")
2195 (match_operand:V16QI 2 "register_operand" "")
2196 (match_operand:V16QI 3 "register_operand" "")]
2197 UNSPEC_VPERM))]
2198 "TARGET_ALTIVEC"
2199 {
2200 if (!BYTES_BIG_ENDIAN) {
2201 altivec_expand_vec_perm_le (operands);
2202 DONE;
2203 }
2204 })
2205
2206 (define_expand "vec_perm_constv16qi"
2207 [(match_operand:V16QI 0 "register_operand" "")
2208 (match_operand:V16QI 1 "register_operand" "")
2209 (match_operand:V16QI 2 "register_operand" "")
2210 (match_operand:V16QI 3 "" "")]
2211 "TARGET_ALTIVEC"
2212 {
2213 if (altivec_expand_vec_perm_const (operands))
2214 DONE;
2215 else
2216 FAIL;
2217 })
2218
2219 (define_insn "*altivec_vpermr_<mode>_internal"
2220 [(set (match_operand:VM 0 "register_operand" "=v,?wo")
2221 (unspec:VM [(match_operand:VM 1 "register_operand" "v,wo")
2222 (match_operand:VM 2 "register_operand" "v,0")
2223 (match_operand:V16QI 3 "register_operand" "v,wo")]
2224 UNSPEC_VPERMR))]
2225 "TARGET_P9_VECTOR"
2226 "@
2227 vpermr %0,%2,%1,%3
2228 xxpermr %x0,%x1,%x3"
2229 [(set_attr "type" "vecperm")
2230 (set_attr "length" "4")])
1367 2231
1368 (define_insn "altivec_vrfip" ; ceil 2232 (define_insn "altivec_vrfip" ; ceil
1369 [(set (match_operand:V4SF 0 "register_operand" "=v") 2233 [(set (match_operand:V4SF 0 "register_operand" "=v")
1370 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")] 2234 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
1371 UNSPEC_FRIP))] 2235 UNSPEC_FRIP))]
1410 (define_insn "altivec_vctuxs" 2274 (define_insn "altivec_vctuxs"
1411 [(set (match_operand:V4SI 0 "register_operand" "=v") 2275 [(set (match_operand:V4SI 0 "register_operand" "=v")
1412 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v") 2276 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
1413 (match_operand:QI 2 "immediate_operand" "i")] 2277 (match_operand:QI 2 "immediate_operand" "i")]
1414 UNSPEC_VCTUXS)) 2278 UNSPEC_VCTUXS))
1415 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 2279 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1416 "TARGET_ALTIVEC" 2280 "TARGET_ALTIVEC"
1417 "vctuxs %0,%1,%2" 2281 "vctuxs %0,%1,%2"
1418 [(set_attr "type" "vecfloat")]) 2282 [(set_attr "type" "vecfloat")])
1419 2283
1420 (define_insn "altivec_vctsxs" 2284 (define_insn "altivec_vctsxs"
1421 [(set (match_operand:V4SI 0 "register_operand" "=v") 2285 [(set (match_operand:V4SI 0 "register_operand" "=v")
1422 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v") 2286 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
1423 (match_operand:QI 2 "immediate_operand" "i")] 2287 (match_operand:QI 2 "immediate_operand" "i")]
1424 UNSPEC_VCTSXS)) 2288 UNSPEC_VCTSXS))
1425 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))] 2289 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1426 "TARGET_ALTIVEC" 2290 "TARGET_ALTIVEC"
1427 "vctsxs %0,%1,%2" 2291 "vctsxs %0,%1,%2"
1428 [(set_attr "type" "vecfloat")]) 2292 [(set_attr "type" "vecfloat")])
1429 2293
1430 (define_insn "altivec_vlogefp" 2294 (define_insn "altivec_vlogefp"
1473 RTVEC_ELT (v, 0) = GEN_INT (mask_val); 2337 RTVEC_ELT (v, 0) = GEN_INT (mask_val);
1474 RTVEC_ELT (v, 1) = GEN_INT (mask_val); 2338 RTVEC_ELT (v, 1) = GEN_INT (mask_val);
1475 RTVEC_ELT (v, 2) = GEN_INT (mask_val); 2339 RTVEC_ELT (v, 2) = GEN_INT (mask_val);
1476 RTVEC_ELT (v, 3) = GEN_INT (mask_val); 2340 RTVEC_ELT (v, 3) = GEN_INT (mask_val);
1477 2341
1478 emit_insn (gen_vec_initv4si (mask, gen_rtx_PARALLEL (V4SImode, v))); 2342 emit_insn (gen_vec_initv4sisi (mask, gen_rtx_PARALLEL (V4SImode, v)));
1479 emit_insn (gen_vector_select_v4sf (operands[0], operands[1], operands[2], 2343 emit_insn (gen_vector_select_v4sf (operands[0], operands[1], operands[2],
1480 gen_lowpart (V4SFmode, mask))); 2344 gen_lowpart (V4SFmode, mask)));
1481 DONE; 2345 DONE;
1482 }") 2346 }")
1483 2347
1484 (define_insn "altivec_vsldoi_<mode>" 2348 (define_insn "altivec_vsldoi_<mode>"
1485 [(set (match_operand:VM 0 "register_operand" "=v") 2349 [(set (match_operand:VM 0 "register_operand" "=v")
1486 (unspec:VM [(match_operand:VM 1 "register_operand" "v") 2350 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
1487 (match_operand:VM 2 "register_operand" "v") 2351 (match_operand:VM 2 "register_operand" "v")
1488 (match_operand:QI 3 "immediate_operand" "i")] 2352 (match_operand:QI 3 "immediate_operand" "i")]
1489 UNSPEC_VLSDOI))] 2353 UNSPEC_VSLDOI))]
1490 "TARGET_ALTIVEC" 2354 "TARGET_ALTIVEC"
1491 "vsldoi %0,%1,%2,%3" 2355 "vsldoi %0,%1,%2,%3"
1492 [(set_attr "type" "vecperm")]) 2356 [(set_attr "type" "vecperm")])
1493 2357
1494 (define_insn "altivec_vupkhsb" 2358 (define_insn "altivec_vupkhs<VU_char>"
1495 [(set (match_operand:V8HI 0 "register_operand" "=v") 2359 [(set (match_operand:VP 0 "register_operand" "=v")
1496 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")] 2360 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
1497 UNSPEC_VUPKHSB))] 2361 UNSPEC_VUNPACK_HI_SIGN))]
1498 "TARGET_ALTIVEC" 2362 "<VI_unit>"
1499 "vupkhsb %0,%1" 2363 {
2364 if (VECTOR_ELT_ORDER_BIG)
2365 return "vupkhs<VU_char> %0,%1";
2366 else
2367 return "vupkls<VU_char> %0,%1";
2368 }
2369 [(set_attr "type" "vecperm")])
2370
2371 (define_insn "*altivec_vupkhs<VU_char>_direct"
2372 [(set (match_operand:VP 0 "register_operand" "=v")
2373 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2374 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2375 "<VI_unit>"
2376 "vupkhs<VU_char> %0,%1"
2377 [(set_attr "type" "vecperm")])
2378
2379 (define_insn "altivec_vupkls<VU_char>"
2380 [(set (match_operand:VP 0 "register_operand" "=v")
2381 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2382 UNSPEC_VUNPACK_LO_SIGN))]
2383 "<VI_unit>"
2384 {
2385 if (VECTOR_ELT_ORDER_BIG)
2386 return "vupkls<VU_char> %0,%1";
2387 else
2388 return "vupkhs<VU_char> %0,%1";
2389 }
2390 [(set_attr "type" "vecperm")])
2391
2392 (define_insn "*altivec_vupkls<VU_char>_direct"
2393 [(set (match_operand:VP 0 "register_operand" "=v")
2394 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2395 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
2396 "<VI_unit>"
2397 "vupkls<VU_char> %0,%1"
1500 [(set_attr "type" "vecperm")]) 2398 [(set_attr "type" "vecperm")])
1501 2399
1502 (define_insn "altivec_vupkhpx" 2400 (define_insn "altivec_vupkhpx"
1503 [(set (match_operand:V4SI 0 "register_operand" "=v") 2401 [(set (match_operand:V4SI 0 "register_operand" "=v")
1504 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")] 2402 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
1505 UNSPEC_VUPKHPX))] 2403 UNSPEC_VUPKHPX))]
1506 "TARGET_ALTIVEC" 2404 "TARGET_ALTIVEC"
1507 "vupkhpx %0,%1" 2405 {
1508 [(set_attr "type" "vecperm")]) 2406 if (VECTOR_ELT_ORDER_BIG)
1509 2407 return "vupkhpx %0,%1";
1510 (define_insn "altivec_vupkhsh" 2408 else
1511 [(set (match_operand:V4SI 0 "register_operand" "=v") 2409 return "vupklpx %0,%1";
1512 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")] 2410 }
1513 UNSPEC_VUPKHSH))]
1514 "TARGET_ALTIVEC"
1515 "vupkhsh %0,%1"
1516 [(set_attr "type" "vecperm")])
1517
1518 (define_insn "altivec_vupklsb"
1519 [(set (match_operand:V8HI 0 "register_operand" "=v")
1520 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
1521 UNSPEC_VUPKLSB))]
1522 "TARGET_ALTIVEC"
1523 "vupklsb %0,%1"
1524 [(set_attr "type" "vecperm")]) 2411 [(set_attr "type" "vecperm")])
1525 2412
1526 (define_insn "altivec_vupklpx" 2413 (define_insn "altivec_vupklpx"
1527 [(set (match_operand:V4SI 0 "register_operand" "=v") 2414 [(set (match_operand:V4SI 0 "register_operand" "=v")
1528 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")] 2415 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
1529 UNSPEC_VUPKLPX))] 2416 UNSPEC_VUPKLPX))]
1530 "TARGET_ALTIVEC" 2417 "TARGET_ALTIVEC"
1531 "vupklpx %0,%1" 2418 {
1532 [(set_attr "type" "vecperm")]) 2419 if (VECTOR_ELT_ORDER_BIG)
1533 2420 return "vupklpx %0,%1";
1534 (define_insn "altivec_vupklsh" 2421 else
1535 [(set (match_operand:V4SI 0 "register_operand" "=v") 2422 return "vupkhpx %0,%1";
1536 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")] 2423 }
1537 UNSPEC_VUPKLSH))]
1538 "TARGET_ALTIVEC"
1539 "vupklsh %0,%1"
1540 [(set_attr "type" "vecperm")]) 2424 [(set_attr "type" "vecperm")])
1541 2425
1542 ;; Compare vectors producing a vector result and a predicate, setting CR6 to 2426 ;; Compare vectors producing a vector result and a predicate, setting CR6 to
1543 ;; indicate a combined status 2427 ;; indicate a combined status
1544 (define_insn "*altivec_vcmpequ<VI_char>_p" 2428 (define_insn "*altivec_vcmpequ<VI_char>_p"
1545 [(set (reg:CC 74) 2429 [(set (reg:CC CR6_REGNO)
1546 (unspec:CC [(eq:CC (match_operand:VI 1 "register_operand" "v") 2430 (unspec:CC [(eq:CC (match_operand:VI2 1 "register_operand" "v")
1547 (match_operand:VI 2 "register_operand" "v"))] 2431 (match_operand:VI2 2 "register_operand" "v"))]
1548 UNSPEC_PREDICATE)) 2432 UNSPEC_PREDICATE))
1549 (set (match_operand:VI 0 "register_operand" "=v") 2433 (set (match_operand:VI2 0 "register_operand" "=v")
1550 (eq:VI (match_dup 1) 2434 (eq:VI2 (match_dup 1)
1551 (match_dup 2)))] 2435 (match_dup 2)))]
1552 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)" 2436 "<VI_unit>"
1553 "vcmpequ<VI_char>. %0,%1,%2" 2437 "vcmpequ<VI_char>. %0,%1,%2"
1554 [(set_attr "type" "veccmp")]) 2438 [(set_attr "type" "veccmpfx")])
1555 2439
1556 (define_insn "*altivec_vcmpgts<VI_char>_p" 2440 (define_insn "*altivec_vcmpgts<VI_char>_p"
1557 [(set (reg:CC 74) 2441 [(set (reg:CC CR6_REGNO)
1558 (unspec:CC [(gt:CC (match_operand:VI 1 "register_operand" "v") 2442 (unspec:CC [(gt:CC (match_operand:VI2 1 "register_operand" "v")
1559 (match_operand:VI 2 "register_operand" "v"))] 2443 (match_operand:VI2 2 "register_operand" "v"))]
1560 UNSPEC_PREDICATE)) 2444 UNSPEC_PREDICATE))
1561 (set (match_operand:VI 0 "register_operand" "=v") 2445 (set (match_operand:VI2 0 "register_operand" "=v")
1562 (gt:VI (match_dup 1) 2446 (gt:VI2 (match_dup 1)
1563 (match_dup 2)))] 2447 (match_dup 2)))]
1564 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)" 2448 "<VI_unit>"
1565 "vcmpgts<VI_char>. %0,%1,%2" 2449 "vcmpgts<VI_char>. %0,%1,%2"
1566 [(set_attr "type" "veccmp")]) 2450 [(set_attr "type" "veccmpfx")])
1567 2451
1568 (define_insn "*altivec_vcmpgtu<VI_char>_p" 2452 (define_insn "*altivec_vcmpgtu<VI_char>_p"
1569 [(set (reg:CC 74) 2453 [(set (reg:CC CR6_REGNO)
1570 (unspec:CC [(gtu:CC (match_operand:VI 1 "register_operand" "v") 2454 (unspec:CC [(gtu:CC (match_operand:VI2 1 "register_operand" "v")
1571 (match_operand:VI 2 "register_operand" "v"))] 2455 (match_operand:VI2 2 "register_operand" "v"))]
1572 UNSPEC_PREDICATE)) 2456 UNSPEC_PREDICATE))
1573 (set (match_operand:VI 0 "register_operand" "=v") 2457 (set (match_operand:VI2 0 "register_operand" "=v")
1574 (gtu:VI (match_dup 1) 2458 (gtu:VI2 (match_dup 1)
1575 (match_dup 2)))] 2459 (match_dup 2)))]
1576 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)" 2460 "<VI_unit>"
1577 "vcmpgtu<VI_char>. %0,%1,%2" 2461 "vcmpgtu<VI_char>. %0,%1,%2"
1578 [(set_attr "type" "veccmp")]) 2462 [(set_attr "type" "veccmpfx")])
1579 2463
1580 (define_insn "*altivec_vcmpeqfp_p" 2464 (define_insn "*altivec_vcmpeqfp_p"
1581 [(set (reg:CC 74) 2465 [(set (reg:CC CR6_REGNO)
1582 (unspec:CC [(eq:CC (match_operand:V4SF 1 "register_operand" "v") 2466 (unspec:CC [(eq:CC (match_operand:V4SF 1 "register_operand" "v")
1583 (match_operand:V4SF 2 "register_operand" "v"))] 2467 (match_operand:V4SF 2 "register_operand" "v"))]
1584 UNSPEC_PREDICATE)) 2468 UNSPEC_PREDICATE))
1585 (set (match_operand:V4SF 0 "register_operand" "=v") 2469 (set (match_operand:V4SF 0 "register_operand" "=v")
1586 (eq:V4SF (match_dup 1) 2470 (eq:V4SF (match_dup 1)
1588 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)" 2472 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1589 "vcmpeqfp. %0,%1,%2" 2473 "vcmpeqfp. %0,%1,%2"
1590 [(set_attr "type" "veccmp")]) 2474 [(set_attr "type" "veccmp")])
1591 2475
1592 (define_insn "*altivec_vcmpgtfp_p" 2476 (define_insn "*altivec_vcmpgtfp_p"
1593 [(set (reg:CC 74) 2477 [(set (reg:CC CR6_REGNO)
1594 (unspec:CC [(gt:CC (match_operand:V4SF 1 "register_operand" "v") 2478 (unspec:CC [(gt:CC (match_operand:V4SF 1 "register_operand" "v")
1595 (match_operand:V4SF 2 "register_operand" "v"))] 2479 (match_operand:V4SF 2 "register_operand" "v"))]
1596 UNSPEC_PREDICATE)) 2480 UNSPEC_PREDICATE))
1597 (set (match_operand:V4SF 0 "register_operand" "=v") 2481 (set (match_operand:V4SF 0 "register_operand" "=v")
1598 (gt:V4SF (match_dup 1) 2482 (gt:V4SF (match_dup 1)
1600 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)" 2484 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1601 "vcmpgtfp. %0,%1,%2" 2485 "vcmpgtfp. %0,%1,%2"
1602 [(set_attr "type" "veccmp")]) 2486 [(set_attr "type" "veccmp")])
1603 2487
1604 (define_insn "*altivec_vcmpgefp_p" 2488 (define_insn "*altivec_vcmpgefp_p"
1605 [(set (reg:CC 74) 2489 [(set (reg:CC CR6_REGNO)
1606 (unspec:CC [(ge:CC (match_operand:V4SF 1 "register_operand" "v") 2490 (unspec:CC [(ge:CC (match_operand:V4SF 1 "register_operand" "v")
1607 (match_operand:V4SF 2 "register_operand" "v"))] 2491 (match_operand:V4SF 2 "register_operand" "v"))]
1608 UNSPEC_PREDICATE)) 2492 UNSPEC_PREDICATE))
1609 (set (match_operand:V4SF 0 "register_operand" "=v") 2493 (set (match_operand:V4SF 0 "register_operand" "=v")
1610 (ge:V4SF (match_dup 1) 2494 (ge:V4SF (match_dup 1)
1612 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)" 2496 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1613 "vcmpgefp. %0,%1,%2" 2497 "vcmpgefp. %0,%1,%2"
1614 [(set_attr "type" "veccmp")]) 2498 [(set_attr "type" "veccmp")])
1615 2499
1616 (define_insn "altivec_vcmpbfp_p" 2500 (define_insn "altivec_vcmpbfp_p"
1617 [(set (reg:CC 74) 2501 [(set (reg:CC CR6_REGNO)
1618 (unspec:CC [(match_operand:V4SF 1 "register_operand" "v") 2502 (unspec:CC [(match_operand:V4SF 1 "register_operand" "v")
1619 (match_operand:V4SF 2 "register_operand" "v")] 2503 (match_operand:V4SF 2 "register_operand" "v")]
1620 UNSPEC_VCMPBFP)) 2504 UNSPEC_VCMPBFP))
1621 (set (match_operand:V4SF 0 "register_operand" "=v") 2505 (set (match_operand:V4SF 0 "register_operand" "=v")
1622 (unspec:V4SF [(match_dup 1) 2506 (unspec:V4SF [(match_dup 1)
1625 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)" 2509 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)"
1626 "vcmpbfp. %0,%1,%2" 2510 "vcmpbfp. %0,%1,%2"
1627 [(set_attr "type" "veccmp")]) 2511 [(set_attr "type" "veccmp")])
1628 2512
1629 (define_insn "altivec_mtvscr" 2513 (define_insn "altivec_mtvscr"
1630 [(set (reg:SI 110) 2514 [(set (reg:SI VSCR_REGNO)
1631 (unspec_volatile:SI 2515 (unspec_volatile:SI
1632 [(match_operand:V4SI 0 "register_operand" "v")] UNSPECV_MTVSCR))] 2516 [(match_operand:V4SI 0 "register_operand" "v")] UNSPECV_MTVSCR))]
1633 "TARGET_ALTIVEC" 2517 "TARGET_ALTIVEC"
1634 "mtvscr %0" 2518 "mtvscr %0"
1635 [(set_attr "type" "vecsimple")]) 2519 [(set_attr "type" "vecsimple")])
1636 2520
1637 (define_insn "altivec_mfvscr" 2521 (define_insn "altivec_mfvscr"
1638 [(set (match_operand:V8HI 0 "register_operand" "=v") 2522 [(set (match_operand:V8HI 0 "register_operand" "=v")
1639 (unspec_volatile:V8HI [(reg:SI 110)] UNSPECV_MFVSCR))] 2523 (unspec_volatile:V8HI [(reg:SI VSCR_REGNO)] UNSPECV_MFVSCR))]
1640 "TARGET_ALTIVEC" 2524 "TARGET_ALTIVEC"
1641 "mfvscr %0" 2525 "mfvscr %0"
1642 [(set_attr "type" "vecsimple")]) 2526 [(set_attr "type" "vecsimple")])
1643 2527
1644 (define_insn "altivec_dssall" 2528 (define_insn "altivec_dssall"
1684 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTSTT)] 2568 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTSTT)]
1685 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode" 2569 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
1686 "dststt %0,%1,%2" 2570 "dststt %0,%1,%2"
1687 [(set_attr "type" "vecsimple")]) 2571 [(set_attr "type" "vecsimple")])
1688 2572
1689 (define_insn "altivec_lvsl" 2573 (define_expand "altivec_lvsl"
2574 [(use (match_operand:V16QI 0 "register_operand" ""))
2575 (use (match_operand:V16QI 1 "memory_operand" ""))]
2576 "TARGET_ALTIVEC"
2577 {
2578 if (VECTOR_ELT_ORDER_BIG)
2579 emit_insn (gen_altivec_lvsl_direct (operands[0], operands[1]));
2580 else
2581 {
2582 int i;
2583 rtx mask, perm[16], constv, vperm;
2584 mask = gen_reg_rtx (V16QImode);
2585 emit_insn (gen_altivec_lvsl_direct (mask, operands[1]));
2586 for (i = 0; i < 16; ++i)
2587 perm[i] = GEN_INT (i);
2588 constv = gen_rtx_CONST_VECTOR (V16QImode, gen_rtvec_v (16, perm));
2589 constv = force_reg (V16QImode, constv);
2590 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2591 UNSPEC_VPERM);
2592 emit_insn (gen_rtx_SET (operands[0], vperm));
2593 }
2594 DONE;
2595 })
2596
2597 (define_insn "altivec_lvsl_reg"
2598 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
2599 (unspec:V16QI
2600 [(match_operand:DI 1 "gpc_reg_operand" "b")]
2601 UNSPEC_LVSL_REG))]
2602 "TARGET_ALTIVEC"
2603 "lvsl %0,0,%1"
2604 [(set_attr "type" "vecload")])
2605
2606 (define_insn "altivec_lvsl_direct"
1690 [(set (match_operand:V16QI 0 "register_operand" "=v") 2607 [(set (match_operand:V16QI 0 "register_operand" "=v")
1691 (unspec:V16QI [(match_operand 1 "memory_operand" "Z")] UNSPEC_LVSL))] 2608 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2609 UNSPEC_LVSL))]
1692 "TARGET_ALTIVEC" 2610 "TARGET_ALTIVEC"
1693 "lvsl %0,%y1" 2611 "lvsl %0,%y1"
1694 [(set_attr "type" "vecload")]) 2612 [(set_attr "type" "vecload")])
1695 2613
1696 (define_insn "altivec_lvsr" 2614 (define_expand "altivec_lvsr"
2615 [(use (match_operand:V16QI 0 "altivec_register_operand"))
2616 (use (match_operand:V16QI 1 "memory_operand"))]
2617 "TARGET_ALTIVEC"
2618 {
2619 if (VECTOR_ELT_ORDER_BIG)
2620 emit_insn (gen_altivec_lvsr_direct (operands[0], operands[1]));
2621 else
2622 {
2623 int i;
2624 rtx mask, perm[16], constv, vperm;
2625 mask = gen_reg_rtx (V16QImode);
2626 emit_insn (gen_altivec_lvsr_direct (mask, operands[1]));
2627 for (i = 0; i < 16; ++i)
2628 perm[i] = GEN_INT (i);
2629 constv = gen_rtx_CONST_VECTOR (V16QImode, gen_rtvec_v (16, perm));
2630 constv = force_reg (V16QImode, constv);
2631 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2632 UNSPEC_VPERM);
2633 emit_insn (gen_rtx_SET (operands[0], vperm));
2634 }
2635 DONE;
2636 })
2637
2638 (define_insn "altivec_lvsr_reg"
2639 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
2640 (unspec:V16QI
2641 [(match_operand:DI 1 "gpc_reg_operand" "b")]
2642 UNSPEC_LVSR_REG))]
2643 "TARGET_ALTIVEC"
2644 "lvsr %0,0,%1"
2645 [(set_attr "type" "vecload")])
2646
2647 (define_insn "altivec_lvsr_direct"
1697 [(set (match_operand:V16QI 0 "register_operand" "=v") 2648 [(set (match_operand:V16QI 0 "register_operand" "=v")
1698 (unspec:V16QI [(match_operand 1 "memory_operand" "Z")] UNSPEC_LVSR))] 2649 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2650 UNSPEC_LVSR))]
1699 "TARGET_ALTIVEC" 2651 "TARGET_ALTIVEC"
1700 "lvsr %0,%y1" 2652 "lvsr %0,%y1"
1701 [(set_attr "type" "vecload")]) 2653 [(set_attr "type" "vecload")])
1702 2654
1703 (define_expand "build_vector_mask_for_load" 2655 (define_expand "build_vector_mask_for_load"
1711 2663
1712 gcc_assert (GET_CODE (operands[1]) == MEM); 2664 gcc_assert (GET_CODE (operands[1]) == MEM);
1713 2665
1714 addr = XEXP (operands[1], 0); 2666 addr = XEXP (operands[1], 0);
1715 temp = gen_reg_rtx (GET_MODE (addr)); 2667 temp = gen_reg_rtx (GET_MODE (addr));
1716 emit_insn (gen_rtx_SET (VOIDmode, temp, 2668 emit_insn (gen_rtx_SET (temp, gen_rtx_NEG (GET_MODE (addr), addr)));
1717 gen_rtx_NEG (GET_MODE (addr), addr)));
1718 emit_insn (gen_altivec_lvsr (operands[0], 2669 emit_insn (gen_altivec_lvsr (operands[0],
1719 replace_equiv_address (operands[1], temp))); 2670 replace_equiv_address (operands[1], temp)));
1720 DONE; 2671 DONE;
1721 }") 2672 }")
1722 2673
1723 ;; Parallel some of the LVE* and STV*'s with unspecs because some have 2674 ;; Parallel some of the LVE* and STV*'s with unspecs because some have
1724 ;; identical rtl but different instructions-- and gcc gets confused. 2675 ;; identical rtl but different instructions-- and gcc gets confused.
1725 2676
1726 (define_insn "altivec_lve<VI_char>x" 2677 (define_expand "altivec_lve<VI_char>x"
2678 [(parallel
2679 [(set (match_operand:VI 0 "register_operand" "=v")
2680 (match_operand:VI 1 "memory_operand" "Z"))
2681 (unspec [(const_int 0)] UNSPEC_LVE)])]
2682 "TARGET_ALTIVEC"
2683 {
2684 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2685 {
2686 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_LVE);
2687 DONE;
2688 }
2689 })
2690
2691 (define_insn "*altivec_lve<VI_char>x_internal"
1727 [(parallel 2692 [(parallel
1728 [(set (match_operand:VI 0 "register_operand" "=v") 2693 [(set (match_operand:VI 0 "register_operand" "=v")
1729 (match_operand:VI 1 "memory_operand" "Z")) 2694 (match_operand:VI 1 "memory_operand" "Z"))
1730 (unspec [(const_int 0)] UNSPEC_LVE)])] 2695 (unspec [(const_int 0)] UNSPEC_LVE)])]
1731 "TARGET_ALTIVEC" 2696 "TARGET_ALTIVEC"
1739 (unspec [(const_int 0)] UNSPEC_LVE)])] 2704 (unspec [(const_int 0)] UNSPEC_LVE)])]
1740 "TARGET_ALTIVEC" 2705 "TARGET_ALTIVEC"
1741 "lvewx %0,%y1" 2706 "lvewx %0,%y1"
1742 [(set_attr "type" "vecload")]) 2707 [(set_attr "type" "vecload")])
1743 2708
1744 (define_insn "altivec_lvxl" 2709 (define_expand "altivec_lvxl_<mode>"
1745 [(parallel 2710 [(parallel
1746 [(set (match_operand:V4SI 0 "register_operand" "=v") 2711 [(set (match_operand:VM2 0 "register_operand" "=v")
1747 (match_operand:V4SI 1 "memory_operand" "Z")) 2712 (match_operand:VM2 1 "memory_operand" "Z"))
2713 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
2714 "TARGET_ALTIVEC"
2715 {
2716 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2717 {
2718 altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_SET_VSCR);
2719 DONE;
2720 }
2721 })
2722
2723 (define_insn "*altivec_lvxl_<mode>_internal"
2724 [(parallel
2725 [(set (match_operand:VM2 0 "register_operand" "=v")
2726 (match_operand:VM2 1 "memory_operand" "Z"))
1748 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])] 2727 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
1749 "TARGET_ALTIVEC" 2728 "TARGET_ALTIVEC"
1750 "lvxl %0,%y1" 2729 "lvxl %0,%y1"
1751 [(set_attr "type" "vecload")]) 2730 [(set_attr "type" "vecload")])
1752 2731
1753 (define_insn "altivec_lvx_<mode>" 2732 ; This version of lvx is used only in cases where we need to force an lvx
2733 ; over any other load, and we don't care about losing CSE opportunities.
2734 ; Its primary use is for prologue register saves.
2735 (define_insn "altivec_lvx_<mode>_internal"
1754 [(parallel 2736 [(parallel
1755 [(set (match_operand:VM2 0 "register_operand" "=v") 2737 [(set (match_operand:VM2 0 "register_operand" "=v")
1756 (match_operand:VM2 1 "memory_operand" "Z")) 2738 (match_operand:VM2 1 "memory_operand" "Z"))
1757 (unspec [(const_int 0)] UNSPEC_LVX)])] 2739 (unspec [(const_int 0)] UNSPEC_LVX)])]
1758 "TARGET_ALTIVEC" 2740 "TARGET_ALTIVEC"
1759 "lvx %0,%y1" 2741 "lvx %0,%y1"
1760 [(set_attr "type" "vecload")]) 2742 [(set_attr "type" "vecload")])
1761 2743
1762 (define_insn "altivec_stvx_<mode>" 2744 ; The next two patterns embody what lvx should usually look like.
2745 (define_insn "altivec_lvx_<mode>_2op"
2746 [(set (match_operand:VM2 0 "register_operand" "=v")
2747 (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b")
2748 (match_operand:DI 2 "register_operand" "r"))
2749 (const_int -16))))]
2750 "TARGET_ALTIVEC && TARGET_64BIT"
2751 "lvx %0,%1,%2"
2752 [(set_attr "type" "vecload")])
2753
2754 (define_insn "altivec_lvx_<mode>_1op"
2755 [(set (match_operand:VM2 0 "register_operand" "=v")
2756 (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r")
2757 (const_int -16))))]
2758 "TARGET_ALTIVEC && TARGET_64BIT"
2759 "lvx %0,0,%1"
2760 [(set_attr "type" "vecload")])
2761
2762 ; 32-bit versions of the above.
2763 (define_insn "altivec_lvx_<mode>_2op_si"
2764 [(set (match_operand:VM2 0 "register_operand" "=v")
2765 (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b")
2766 (match_operand:SI 2 "register_operand" "r"))
2767 (const_int -16))))]
2768 "TARGET_ALTIVEC && TARGET_32BIT"
2769 "lvx %0,%1,%2"
2770 [(set_attr "type" "vecload")])
2771
2772 (define_insn "altivec_lvx_<mode>_1op_si"
2773 [(set (match_operand:VM2 0 "register_operand" "=v")
2774 (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r")
2775 (const_int -16))))]
2776 "TARGET_ALTIVEC && TARGET_32BIT"
2777 "lvx %0,0,%1"
2778 [(set_attr "type" "vecload")])
2779
2780 ; This version of stvx is used only in cases where we need to force an stvx
2781 ; over any other store, and we don't care about losing CSE opportunities.
2782 ; Its primary use is for epilogue register restores.
2783 (define_insn "altivec_stvx_<mode>_internal"
1763 [(parallel 2784 [(parallel
1764 [(set (match_operand:VM2 0 "memory_operand" "=Z") 2785 [(set (match_operand:VM2 0 "memory_operand" "=Z")
1765 (match_operand:VM2 1 "register_operand" "v")) 2786 (match_operand:VM2 1 "register_operand" "v"))
1766 (unspec [(const_int 0)] UNSPEC_STVX)])] 2787 (unspec [(const_int 0)] UNSPEC_STVX)])]
1767 "TARGET_ALTIVEC" 2788 "TARGET_ALTIVEC"
1768 "stvx %1,%y0" 2789 "stvx %1,%y0"
1769 [(set_attr "type" "vecstore")]) 2790 [(set_attr "type" "vecstore")])
1770 2791
1771 (define_insn "altivec_stvxl" 2792 ; The next two patterns embody what stvx should usually look like.
2793 (define_insn "altivec_stvx_<mode>_2op"
2794 [(set (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b")
2795 (match_operand:DI 2 "register_operand" "r"))
2796 (const_int -16)))
2797 (match_operand:VM2 0 "register_operand" "v"))]
2798 "TARGET_ALTIVEC && TARGET_64BIT"
2799 "stvx %0,%1,%2"
2800 [(set_attr "type" "vecstore")])
2801
2802 (define_insn "altivec_stvx_<mode>_1op"
2803 [(set (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r")
2804 (const_int -16)))
2805 (match_operand:VM2 0 "register_operand" "v"))]
2806 "TARGET_ALTIVEC && TARGET_64BIT"
2807 "stvx %0,0,%1"
2808 [(set_attr "type" "vecstore")])
2809
2810 ; 32-bit versions of the above.
2811 (define_insn "altivec_stvx_<mode>_2op_si"
2812 [(set (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b")
2813 (match_operand:SI 2 "register_operand" "r"))
2814 (const_int -16)))
2815 (match_operand:VM2 0 "register_operand" "v"))]
2816 "TARGET_ALTIVEC && TARGET_32BIT"
2817 "stvx %0,%1,%2"
2818 [(set_attr "type" "vecstore")])
2819
2820 (define_insn "altivec_stvx_<mode>_1op_si"
2821 [(set (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r")
2822 (const_int -16)))
2823 (match_operand:VM2 0 "register_operand" "v"))]
2824 "TARGET_ALTIVEC && TARGET_32BIT"
2825 "stvx %0,0,%1"
2826 [(set_attr "type" "vecstore")])
2827
2828 (define_expand "altivec_stvxl_<mode>"
1772 [(parallel 2829 [(parallel
1773 [(set (match_operand:V4SI 0 "memory_operand" "=Z") 2830 [(set (match_operand:VM2 0 "memory_operand" "=Z")
1774 (match_operand:V4SI 1 "register_operand" "v")) 2831 (match_operand:VM2 1 "register_operand" "v"))
2832 (unspec [(const_int 0)] UNSPEC_STVXL)])]
2833 "TARGET_ALTIVEC"
2834 {
2835 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2836 {
2837 altivec_expand_stvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVXL);
2838 DONE;
2839 }
2840 })
2841
2842 (define_insn "*altivec_stvxl_<mode>_internal"
2843 [(parallel
2844 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2845 (match_operand:VM2 1 "register_operand" "v"))
1775 (unspec [(const_int 0)] UNSPEC_STVXL)])] 2846 (unspec [(const_int 0)] UNSPEC_STVXL)])]
1776 "TARGET_ALTIVEC" 2847 "TARGET_ALTIVEC"
1777 "stvxl %1,%y0" 2848 "stvxl %1,%y0"
1778 [(set_attr "type" "vecstore")]) 2849 [(set_attr "type" "vecstore")])
1779 2850
1780 (define_insn "altivec_stve<VI_char>x" 2851 (define_expand "altivec_stve<VI_char>x"
2852 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
2853 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
2854 "TARGET_ALTIVEC"
2855 {
2856 if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
2857 {
2858 altivec_expand_stvex_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVE);
2859 DONE;
2860 }
2861 })
2862
2863 (define_insn "*altivec_stve<VI_char>x_internal"
1781 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z") 2864 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
1782 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))] 2865 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
1783 "TARGET_ALTIVEC" 2866 "TARGET_ALTIVEC"
1784 "stve<VI_char>x %1,%y0" 2867 "stve<VI_char>x %1,%y0"
1785 [(set_attr "type" "vecstore")]) 2868 [(set_attr "type" "vecstore")])
1789 (unspec:SF [(match_operand:V4SF 1 "register_operand" "v")] UNSPEC_STVE))] 2872 (unspec:SF [(match_operand:V4SF 1 "register_operand" "v")] UNSPEC_STVE))]
1790 "TARGET_ALTIVEC" 2873 "TARGET_ALTIVEC"
1791 "stvewx %1,%y0" 2874 "stvewx %1,%y0"
1792 [(set_attr "type" "vecstore")]) 2875 [(set_attr "type" "vecstore")])
1793 2876
2877 ;; Generate doublee
2878 ;; signed int/float to double convert words 0 and 2
2879 (define_expand "doublee<mode>2"
2880 [(set (match_operand:V2DF 0 "register_operand" "=v")
2881 (match_operand:VSX_W 1 "register_operand" "v"))]
2882 "TARGET_VSX"
2883 {
2884 machine_mode op_mode = GET_MODE (operands[1]);
2885
2886 if (VECTOR_ELT_ORDER_BIG)
2887 {
2888 /* Big endian word numbering for words in operand is 0 1 2 3.
2889 Input words 0 and 2 are where they need to be. */
2890 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], operands[1]));
2891 }
2892 else
2893 {
2894 /* Little endian word numbering for operand is 3 2 1 0.
2895 take (operand[1] operand[1]) and shift left one word
2896 3 2 1 0 3 2 1 0 => 2 1 0 3
2897 Input words 2 and 0 are now where they need to be for the
2898 conversion. */
2899 rtx rtx_tmp;
2900 rtx rtx_val = GEN_INT (1);
2901
2902 rtx_tmp = gen_reg_rtx (op_mode);
2903 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
2904 operands[1], rtx_val));
2905 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
2906 }
2907 DONE;
2908 }
2909 [(set_attr "type" "veccomplex")])
2910
2911 ;; Generate unsdoublee
2912 ;; unsigned int to double convert words 0 and 2
2913 (define_expand "unsdoubleev4si2"
2914 [(set (match_operand:V2DF 0 "register_operand" "=v")
2915 (match_operand:V4SI 1 "register_operand" "v"))]
2916 "TARGET_VSX"
2917 {
2918 if (VECTOR_ELT_ORDER_BIG)
2919 {
2920 /* Big endian word numbering for words in operand is 0 1 2 3.
2921 Input words 0 and 2 are where they need to be. */
2922 emit_insn (gen_vsx_xvcvuxwdp (operands[0], operands[1]));
2923 }
2924 else
2925 {
2926 /* Little endian word numbering for operand is 3 2 1 0.
2927 take (operand[1] operand[1]) and shift left one word
2928 3 2 1 0 3 2 1 0 => 2 1 0 3
2929 Input words 2 and 0 are now where they need to be for the
2930 conversion. */
2931 rtx rtx_tmp;
2932 rtx rtx_val = GEN_INT (1);
2933
2934 rtx_tmp = gen_reg_rtx (V4SImode);
2935 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
2936 operands[1], rtx_val));
2937 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
2938 }
2939 DONE;
2940 }
2941 [(set_attr "type" "veccomplex")])
2942
2943 ;; Generate doubleov
2944 ;; signed int/float to double convert words 1 and 3
2945 (define_expand "doubleo<mode>2"
2946 [(set (match_operand:V2DF 0 "register_operand" "=v")
2947 (match_operand:VSX_W 1 "register_operand" "v"))]
2948 "TARGET_VSX"
2949 {
2950 machine_mode op_mode = GET_MODE (operands[1]);
2951
2952 if (VECTOR_ELT_ORDER_BIG)
2953 {
2954 /* Big endian word numbering for words in operand is 0 1 2 3.
2955 take (operand[1] operand[1]) and shift left one word
2956 0 1 2 3 0 1 2 3 => 1 2 3 0
2957 Input words 1 and 3 are now where they need to be for the
2958 conversion. */
2959 rtx rtx_tmp;
2960 rtx rtx_val = GEN_INT (1);
2961
2962 rtx_tmp = gen_reg_rtx (op_mode);
2963 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
2964 operands[1], rtx_val));
2965 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
2966 }
2967 else
2968 {
2969 /* Little endian word numbering for operand is 3 2 1 0.
2970 Input words 3 and 1 are where they need to be. */
2971 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], operands[1]));
2972 }
2973 DONE;
2974 }
2975 [(set_attr "type" "veccomplex")])
2976
2977 ;; Generate unsdoubleov
2978 ;; unsigned int to double convert words 1 and 3
2979 (define_expand "unsdoubleov4si2"
2980 [(set (match_operand:V2DF 0 "register_operand" "=v")
2981 (match_operand:V4SI 1 "register_operand" "v"))]
2982 "TARGET_VSX"
2983 {
2984 if (VECTOR_ELT_ORDER_BIG)
2985 {
2986 /* Big endian word numbering for words in operand is 0 1 2 3.
2987 take (operand[1] operand[1]) and shift left one word
2988 0 1 2 3 0 1 2 3 => 1 2 3 0
2989 Input words 1 and 3 are now where they need to be for the
2990 conversion. */
2991 rtx rtx_tmp;
2992 rtx rtx_val = GEN_INT (1);
2993
2994 rtx_tmp = gen_reg_rtx (V4SImode);
2995 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
2996 operands[1], rtx_val));
2997 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
2998 }
2999 else
3000 {
3001 /* Want to convert the words 1 and 3.
3002 Little endian word numbering for operand is 3 2 1 0.
3003 Input words 3 and 1 are where they need to be. */
3004 emit_insn (gen_vsx_xvcvuxwdp (operands[0], operands[1]));
3005 }
3006 DONE;
3007 }
3008 [(set_attr "type" "veccomplex")])
3009
3010 ;; Generate doublehv
3011 ;; signed int/float to double convert words 0 and 1
3012 (define_expand "doubleh<mode>2"
3013 [(set (match_operand:V2DF 0 "register_operand" "=v")
3014 (match_operand:VSX_W 1 "register_operand" "v"))]
3015 "TARGET_VSX"
3016 {
3017 rtx rtx_tmp;
3018 rtx rtx_val;
3019
3020 machine_mode op_mode = GET_MODE (operands[1]);
3021 rtx_tmp = gen_reg_rtx (op_mode);
3022
3023 if (VECTOR_ELT_ORDER_BIG)
3024 {
3025 /* Big endian word numbering for words in operand is 0 1 2 3.
3026 Shift operand left one word, rtx_tmp word order is now 1 2 3 0.
3027 take (rts_tmp operand[1]) and shift left three words
3028 1 2 3 0 0 1 2 3 => 0 0 1 2
3029 Input words 0 and 1 are now where they need to be for the
3030 conversion. */
3031 rtx_val = GEN_INT (1);
3032 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3033 operands[1], rtx_val));
3034
3035 rtx_val = GEN_INT (3);
3036 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, rtx_tmp,
3037 operands[1], rtx_val));
3038 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3039 }
3040 else
3041 {
3042 /* Little endian word numbering for operand is 3 2 1 0.
3043 Shift operand left three words, rtx_tmp word order is now 0 3 2 1.
3044 take (operand[1] rts_tmp) and shift left two words
3045 3 2 1 0 0 3 2 1 => 1 0 0 3
3046 Input words 0 and 1 are now where they need to be for the
3047 conversion. */
3048 rtx_val = GEN_INT (3);
3049 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3050 operands[1], rtx_val));
3051
3052 rtx_val = GEN_INT (2);
3053 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3054 rtx_tmp, rtx_val));
3055 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3056 }
3057 DONE;
3058 }
3059 [(set_attr "type" "veccomplex")])
3060
3061 ;; Generate unsdoublehv
3062 ;; unsigned int to double convert words 0 and 1
3063 (define_expand "unsdoublehv4si2"
3064 [(set (match_operand:V2DF 0 "register_operand" "=v")
3065 (match_operand:V4SI 1 "register_operand" "v"))]
3066 "TARGET_VSX"
3067 {
3068 rtx rtx_tmp = gen_reg_rtx (V4SImode);
3069 rtx rtx_val = GEN_INT (12);
3070
3071 if (VECTOR_ELT_ORDER_BIG)
3072 {
3073 /* Big endian word numbering for words in operand is 0 1 2 3.
3074 Shift operand left one word, rtx_tmp word order is now 1 2 3 0.
3075 take (rts_tmp operand[1]) and shift left three words
3076 1 2 3 0 0 1 2 3 => 0 0 1 2
3077 Input words 0 and 1 are now where they need to be for the
3078 conversion. */
3079 rtx_val = GEN_INT (1);
3080 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3081 operands[1], rtx_val));
3082
3083 rtx_val = GEN_INT (3);
3084 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, rtx_tmp,
3085 operands[1], rtx_val));
3086 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3087 }
3088 else
3089 {
3090 /* Little endian word numbering for operand is 3 2 1 0.
3091 Shift operand left three words, rtx_tmp word order is now 0 3 2 1.
3092 take (operand[1] rts_tmp) and shift left two words
3093 3 2 1 0 0 3 2 1 => 1 0 0 3
3094 Input words 1 and 0 are now where they need to be for the
3095 conversion. */
3096 rtx_val = GEN_INT (3);
3097
3098 rtx_tmp = gen_reg_rtx (V4SImode);
3099 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3100 operands[1], rtx_val));
3101
3102 rtx_val = GEN_INT (2);
3103 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3104 rtx_tmp, rtx_val));
3105 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3106 }
3107 DONE;
3108 }
3109 [(set_attr "type" "veccomplex")])
3110
3111 ;; Generate doublelv
3112 ;; signed int/float to double convert words 2 and 3
3113 (define_expand "doublel<mode>2"
3114 [(set (match_operand:V2DF 0 "register_operand" "=v")
3115 (match_operand:VSX_W 1 "register_operand" "v"))]
3116 "TARGET_VSX"
3117 {
3118 rtx rtx_tmp;
3119 rtx rtx_val = GEN_INT (3);
3120
3121 machine_mode op_mode = GET_MODE (operands[1]);
3122 rtx_tmp = gen_reg_rtx (op_mode);
3123
3124 if (VECTOR_ELT_ORDER_BIG)
3125 {
3126 /* Big endian word numbering for operand is 0 1 2 3.
3127 Shift operand left three words, rtx_tmp word order is now 3 0 1 2.
3128 take (operand[1] rtx_tmp) and shift left two words
3129 0 1 2 3 3 0 1 2 => 2 3 3 0
3130 now use convert instruction to convert word 2 and 3 in the
3131 input vector. */
3132 rtx_val = GEN_INT (3);
3133 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3134 operands[1], rtx_val));
3135
3136 rtx_val = GEN_INT (2);
3137 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3138 rtx_tmp, rtx_val));
3139 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3140 }
3141 else
3142 {
3143 /* Little endian word numbering for operand is 3 2 1 0.
3144 Shift operand left one word, rtx_tmp word order is now 2 1 0 3.
3145 take (rtx_tmp operand[1]) and shift left three words
3146 2 1 0 3 3 2 1 0 => 3 3 2 1
3147 now use convert instruction to convert word 3 and 2 in the
3148 input vector. */
3149 rtx_val = GEN_INT (1);
3150 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3151 operands[1], rtx_val));
3152
3153 rtx_val = GEN_INT (3);
3154 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, rtx_tmp,
3155 operands[1], rtx_val));
3156 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3157 }
3158 DONE;
3159 }
3160 [(set_attr "type" "veccomplex")])
3161
3162 ;; Generate unsdoublelv
3163 ;; unsigned int to double convert convert 2 and 3
3164 (define_expand "unsdoublelv4si2"
3165 [(set (match_operand:V2DF 0 "register_operand" "=v")
3166 (match_operand:V4SI 1 "register_operand" "v"))]
3167 "TARGET_VSX"
3168 {
3169 rtx rtx_tmp = gen_reg_rtx (V4SImode);
3170 rtx rtx_val = GEN_INT (12);
3171
3172 if (VECTOR_ELT_ORDER_BIG)
3173 {
3174 /* Big endian word numbering for operand is 0 1 2 3.
3175 Shift operand left three words, rtx_tmp word order is now 3 0 1 2.
3176 take (operand[1] rtx_tmp) and shift left two words
3177 0 1 2 3 3 0 1 2 => 2 3 3 0
3178 now use convert instruction to convert word 2 and 3 in the
3179 input vector. */
3180 rtx_val = GEN_INT (3);
3181 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3182 operands[1], rtx_val));
3183
3184 rtx_val = GEN_INT (2);
3185 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3186 rtx_tmp, rtx_val));
3187 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3188 }
3189 else
3190 {
3191 /* Little endian word numbering for operand is 3 2 1 0.
3192 Shift operand left one word, rtx_tmp word order is now 2 1 0 3.
3193 take (rtx_tmp operand[1]) and shift left three words
3194 2 1 0 3 3 2 1 0 => 3 3 2 1
3195 now use convert instruction to convert word 3 and 2 in the
3196 input vector. */
3197 rtx_val = GEN_INT (1);
3198 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp,
3199 operands[1], operands[1], rtx_val));
3200
3201 rtx_val = GEN_INT (3);
3202 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, rtx_tmp,
3203 operands[1], rtx_val));
3204 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3205 }
3206 DONE;
3207 }
3208 [(set_attr "type" "veccomplex")])
3209
3210 ;; Generate two vector F32 converted to packed vector I16 vector
3211 (define_expand "convert_4f32_8i16"
3212 [(set (match_operand:V8HI 0 "register_operand" "=v")
3213 (unspec:V8HI [(match_operand:V4SF 1 "register_operand" "v")
3214 (match_operand:V4SF 2 "register_operand" "v")]
3215 UNSPEC_CONVERT_4F32_8I16))]
3216 "TARGET_P9_VECTOR"
3217 {
3218 rtx rtx_tmp_hi = gen_reg_rtx (V4SImode);
3219 rtx rtx_tmp_lo = gen_reg_rtx (V4SImode);
3220
3221 emit_insn (gen_altivec_vctuxs (rtx_tmp_hi, operands[1], const0_rtx));
3222 emit_insn (gen_altivec_vctuxs (rtx_tmp_lo, operands[2], const0_rtx));
3223 emit_insn (gen_altivec_vpkswss (operands[0], rtx_tmp_hi, rtx_tmp_lo));
3224 DONE;
3225 })
3226
1794 ;; Generate 3227 ;; Generate
1795 ;; vspltis? SCRATCH0,0 3228 ;; xxlxor/vxor SCRATCH0,SCRATCH0,SCRATCH0
1796 ;; vsubu?m SCRATCH2,SCRATCH1,%1 3229 ;; vsubu?m SCRATCH2,SCRATCH1,%1
1797 ;; vmaxs? %0,%1,SCRATCH2" 3230 ;; vmaxs? %0,%1,SCRATCH2"
1798 (define_expand "abs<mode>2" 3231 (define_expand "abs<mode>2"
1799 [(set (match_dup 2) (vec_duplicate:VI (const_int 0))) 3232 [(set (match_dup 2) (match_dup 3))
1800 (set (match_dup 3) 3233 (set (match_dup 4)
1801 (minus:VI (match_dup 2) 3234 (minus:VI2 (match_dup 2)
1802 (match_operand:VI 1 "register_operand" "v"))) 3235 (match_operand:VI2 1 "register_operand" "v")))
1803 (set (match_operand:VI 0 "register_operand" "=v") 3236 (set (match_operand:VI2 0 "register_operand" "=v")
1804 (smax:VI (match_dup 1) (match_dup 3)))] 3237 (smax:VI2 (match_dup 1) (match_dup 4)))]
1805 "TARGET_ALTIVEC" 3238 "<VI_unit>"
1806 { 3239 {
1807 operands[2] = gen_reg_rtx (GET_MODE (operands[0])); 3240 int i, n_elt = GET_MODE_NUNITS (<MODE>mode);
1808 operands[3] = gen_reg_rtx (GET_MODE (operands[0])); 3241 rtvec v = rtvec_alloc (n_elt);
3242
3243 /* Create an all 0 constant. */
3244 for (i = 0; i < n_elt; ++i)
3245 RTVEC_ELT (v, i) = const0_rtx;
3246
3247 operands[2] = gen_reg_rtx (<MODE>mode);
3248 operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, v);
3249 operands[4] = gen_reg_rtx (<MODE>mode);
3250 })
3251
3252 ;; Generate
3253 ;; vspltisw SCRATCH1,0
3254 ;; vsubu?m SCRATCH2,SCRATCH1,%1
3255 ;; vmins? %0,%1,SCRATCH2"
3256 (define_expand "nabs<mode>2"
3257 [(set (match_dup 2) (match_dup 3))
3258 (set (match_dup 4)
3259 (minus:VI2 (match_dup 2)
3260 (match_operand:VI2 1 "register_operand" "v")))
3261 (set (match_operand:VI2 0 "register_operand" "=v")
3262 (smin:VI2 (match_dup 1) (match_dup 4)))]
3263 "<VI_unit>"
3264 {
3265 int i;
3266 int n_elt = GET_MODE_NUNITS (<MODE>mode);
3267
3268 rtvec v = rtvec_alloc (n_elt);
3269
3270 /* Create an all 0 constant. */
3271 for (i = 0; i < n_elt; ++i)
3272 RTVEC_ELT (v, i) = const0_rtx;
3273
3274 operands[2] = gen_reg_rtx (<MODE>mode);
3275 operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, v);
3276 operands[4] = gen_reg_rtx (<MODE>mode);
1809 }) 3277 })
1810 3278
1811 ;; Generate 3279 ;; Generate
1812 ;; vspltisw SCRATCH1,-1 3280 ;; vspltisw SCRATCH1,-1
1813 ;; vslw SCRATCH2,SCRATCH1,SCRATCH1 3281 ;; vslw SCRATCH2,SCRATCH1,SCRATCH1
1834 [(set (match_dup 2) (vec_duplicate:VI (const_int 0))) 3302 [(set (match_dup 2) (vec_duplicate:VI (const_int 0)))
1835 (parallel [(set (match_dup 3) 3303 (parallel [(set (match_dup 3)
1836 (unspec:VI [(match_dup 2) 3304 (unspec:VI [(match_dup 2)
1837 (match_operand:VI 1 "register_operand" "v")] 3305 (match_operand:VI 1 "register_operand" "v")]
1838 UNSPEC_VSUBS)) 3306 UNSPEC_VSUBS))
1839 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]) 3307 (set (reg:SI VSCR_REGNO)
3308 (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))])
1840 (set (match_operand:VI 0 "register_operand" "=v") 3309 (set (match_operand:VI 0 "register_operand" "=v")
1841 (smax:VI (match_dup 1) (match_dup 3)))] 3310 (smax:VI (match_dup 1) (match_dup 3)))]
1842 "TARGET_ALTIVEC" 3311 "TARGET_ALTIVEC"
1843 { 3312 {
1844 operands[2] = gen_reg_rtx (GET_MODE (operands[0])); 3313 operands[2] = gen_reg_rtx (GET_MODE (operands[0]));
1845 operands[3] = gen_reg_rtx (GET_MODE (operands[0])); 3314 operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
1846 }) 3315 })
1847 3316
1848 (define_insn "altivec_vsumsws_nomode" 3317 (define_expand "reduc_plus_scal_<mode>"
1849 [(set (match_operand 0 "register_operand" "=v") 3318 [(set (match_operand:<VI_scalar> 0 "register_operand" "=v")
1850 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1851 (match_operand:V4SI 2 "register_operand" "v")]
1852 UNSPEC_VSUMSWS))
1853 (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1854 "TARGET_ALTIVEC"
1855 "vsumsws %0,%1,%2"
1856 [(set_attr "type" "veccomplex")])
1857
1858 (define_expand "reduc_splus_<mode>"
1859 [(set (match_operand:VIshort 0 "register_operand" "=v")
1860 (unspec:VIshort [(match_operand:VIshort 1 "register_operand" "v")] 3319 (unspec:VIshort [(match_operand:VIshort 1 "register_operand" "v")]
1861 UNSPEC_REDUC_PLUS))] 3320 UNSPEC_REDUC_PLUS))]
1862 "TARGET_ALTIVEC" 3321 "TARGET_ALTIVEC"
1863 " 3322 {
1864 {
1865 rtx vzero = gen_reg_rtx (V4SImode); 3323 rtx vzero = gen_reg_rtx (V4SImode);
1866 rtx vtmp1 = gen_reg_rtx (V4SImode); 3324 rtx vtmp1 = gen_reg_rtx (V4SImode);
3325 rtx vtmp2 = gen_reg_rtx (<MODE>mode);
3326 rtx dest = gen_lowpart (V4SImode, vtmp2);
3327 int elt = VECTOR_ELT_ORDER_BIG ? GET_MODE_NUNITS (<MODE>mode) - 1 : 0;
1867 3328
1868 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx)); 3329 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
1869 emit_insn (gen_altivec_vsum4s<VI_char>s (vtmp1, operands[1], vzero)); 3330 emit_insn (gen_altivec_vsum4s<VI_char>s (vtmp1, operands[1], vzero));
1870 emit_insn (gen_altivec_vsumsws_nomode (operands[0], vtmp1, vzero)); 3331 emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
1871 DONE; 3332 rs6000_expand_vector_extract (operands[0], vtmp2, GEN_INT (elt));
1872 }") 3333 DONE;
1873 3334 })
1874 (define_expand "reduc_uplus_v16qi" 3335
1875 [(set (match_operand:V16QI 0 "register_operand" "=v") 3336 (define_insn "*p9_neg<mode>2"
1876 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")] 3337 [(set (match_operand:VNEG 0 "altivec_register_operand" "=v")
1877 UNSPEC_REDUC_PLUS))] 3338 (neg:VNEG (match_operand:VNEG 1 "altivec_register_operand" "v")))]
1878 "TARGET_ALTIVEC" 3339 "TARGET_P9_VECTOR"
1879 " 3340 "vneg<VI_char> %0,%1"
1880 { 3341 [(set_attr "type" "vecsimple")])
1881 rtx vzero = gen_reg_rtx (V4SImode);
1882 rtx vtmp1 = gen_reg_rtx (V4SImode);
1883
1884 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
1885 emit_insn (gen_altivec_vsum4ubs (vtmp1, operands[1], vzero));
1886 emit_insn (gen_altivec_vsumsws_nomode (operands[0], vtmp1, vzero));
1887 DONE;
1888 }")
1889 3342
1890 (define_expand "neg<mode>2" 3343 (define_expand "neg<mode>2"
1891 [(use (match_operand:VI 0 "register_operand" "")) 3344 [(set (match_operand:VI2 0 "register_operand" "")
1892 (use (match_operand:VI 1 "register_operand" ""))] 3345 (neg:VI2 (match_operand:VI2 1 "register_operand" "")))]
1893 "TARGET_ALTIVEC" 3346 "<VI_unit>"
1894 " 3347 {
1895 { 3348 if (!TARGET_P9_VECTOR || (<MODE>mode != V4SImode && <MODE>mode != V2DImode))
1896 rtx vzero; 3349 {
1897 3350 rtx vzero;
1898 vzero = gen_reg_rtx (GET_MODE (operands[0])); 3351
1899 emit_insn (gen_altivec_vspltis<VI_char> (vzero, const0_rtx)); 3352 vzero = gen_reg_rtx (GET_MODE (operands[0]));
1900 emit_insn (gen_sub<mode>3 (operands[0], vzero, operands[1])); 3353 emit_move_insn (vzero, CONST0_RTX (<MODE>mode));
1901 3354 emit_insn (gen_sub<mode>3 (operands[0], vzero, operands[1]));
1902 DONE; 3355 DONE;
1903 }") 3356 }
3357 })
1904 3358
1905 (define_expand "udot_prod<mode>" 3359 (define_expand "udot_prod<mode>"
1906 [(set (match_operand:V4SI 0 "register_operand" "=v") 3360 [(set (match_operand:V4SI 0 "register_operand" "=v")
1907 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v") 3361 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
1908 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v") 3362 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1971 emit_insn (gen_altivec_vspltish (vones, const1_rtx)); 3425 emit_insn (gen_altivec_vspltish (vones, const1_rtx));
1972 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], vones, operands[2])); 3426 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], vones, operands[2]));
1973 DONE; 3427 DONE;
1974 }") 3428 }")
1975 3429
1976 (define_expand "vec_unpacks_hi_v16qi" 3430 (define_expand "vec_unpacks_hi_<VP_small_lc>"
1977 [(set (match_operand:V8HI 0 "register_operand" "=v") 3431 [(set (match_operand:VP 0 "register_operand" "=v")
1978 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")] 3432 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
1979 UNSPEC_VUPKHSB))] 3433 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
1980 "TARGET_ALTIVEC" 3434 "<VI_unit>"
1981 " 3435 "")
1982 { 3436
1983 emit_insn (gen_altivec_vupkhsb (operands[0], operands[1])); 3437 (define_expand "vec_unpacks_lo_<VP_small_lc>"
1984 DONE; 3438 [(set (match_operand:VP 0 "register_operand" "=v")
1985 }") 3439 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
1986 3440 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
1987 (define_expand "vec_unpacks_hi_v8hi" 3441 "<VI_unit>"
1988 [(set (match_operand:V4SI 0 "register_operand" "=v") 3442 "")
1989 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
1990 UNSPEC_VUPKHSH))]
1991 "TARGET_ALTIVEC"
1992 "
1993 {
1994 emit_insn (gen_altivec_vupkhsh (operands[0], operands[1]));
1995 DONE;
1996 }")
1997
1998 (define_expand "vec_unpacks_lo_v16qi"
1999 [(set (match_operand:V8HI 0 "register_operand" "=v")
2000 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
2001 UNSPEC_VUPKLSB))]
2002 "TARGET_ALTIVEC"
2003 "
2004 {
2005 emit_insn (gen_altivec_vupklsb (operands[0], operands[1]));
2006 DONE;
2007 }")
2008
2009 (define_expand "vec_unpacks_lo_v8hi"
2010 [(set (match_operand:V4SI 0 "register_operand" "=v")
2011 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2012 UNSPEC_VUPKLSH))]
2013 "TARGET_ALTIVEC"
2014 "
2015 {
2016 emit_insn (gen_altivec_vupklsh (operands[0], operands[1]));
2017 DONE;
2018 }")
2019 3443
2020 (define_insn "vperm_v8hiv4si" 3444 (define_insn "vperm_v8hiv4si"
2021 [(set (match_operand:V4SI 0 "register_operand" "=v") 3445 [(set (match_operand:V4SI 0 "register_operand" "=v,?wo")
2022 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v") 3446 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v,wo")
2023 (match_operand:V4SI 2 "register_operand" "v") 3447 (match_operand:V4SI 2 "register_operand" "v,0")
2024 (match_operand:V16QI 3 "register_operand" "v")] 3448 (match_operand:V16QI 3 "register_operand" "v,wo")]
2025 UNSPEC_VPERMSI))] 3449 UNSPEC_VPERMSI))]
2026 "TARGET_ALTIVEC" 3450 "TARGET_ALTIVEC"
2027 "vperm %0,%1,%2,%3" 3451 "@
2028 [(set_attr "type" "vecperm")]) 3452 vperm %0,%1,%2,%3
3453 xxperm %x0,%x1,%x3"
3454 [(set_attr "type" "vecperm")
3455 (set_attr "length" "4")])
2029 3456
2030 (define_insn "vperm_v16qiv8hi" 3457 (define_insn "vperm_v16qiv8hi"
2031 [(set (match_operand:V8HI 0 "register_operand" "=v") 3458 [(set (match_operand:V8HI 0 "register_operand" "=v,?wo")
2032 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v") 3459 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v,wo")
2033 (match_operand:V8HI 2 "register_operand" "v") 3460 (match_operand:V8HI 2 "register_operand" "v,0")
2034 (match_operand:V16QI 3 "register_operand" "v")] 3461 (match_operand:V16QI 3 "register_operand" "v,wo")]
2035 UNSPEC_VPERMHI))] 3462 UNSPEC_VPERMHI))]
2036 "TARGET_ALTIVEC" 3463 "TARGET_ALTIVEC"
2037 "vperm %0,%1,%2,%3" 3464 "@
2038 [(set_attr "type" "vecperm")]) 3465 vperm %0,%1,%2,%3
3466 xxperm %x0,%x1,%x3"
3467 [(set_attr "type" "vecperm")
3468 (set_attr "length" "4")])
2039 3469
2040 3470
2041 (define_expand "vec_unpacku_hi_v16qi" 3471 (define_expand "vec_unpacku_hi_v16qi"
2042 [(set (match_operand:V8HI 0 "register_operand" "=v") 3472 [(set (match_operand:V8HI 0 "register_operand" "=v")
2043 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")] 3473 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
2046 " 3476 "
2047 { 3477 {
2048 rtx vzero = gen_reg_rtx (V8HImode); 3478 rtx vzero = gen_reg_rtx (V8HImode);
2049 rtx mask = gen_reg_rtx (V16QImode); 3479 rtx mask = gen_reg_rtx (V16QImode);
2050 rtvec v = rtvec_alloc (16); 3480 rtvec v = rtvec_alloc (16);
3481 bool be = BYTES_BIG_ENDIAN;
2051 3482
2052 emit_insn (gen_altivec_vspltish (vzero, const0_rtx)); 3483 emit_insn (gen_altivec_vspltish (vzero, const0_rtx));
2053 3484
2054 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, 16); 3485 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 7);
2055 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, 0); 3486 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 0 : 16);
2056 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, 16); 3487 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 16 : 6);
2057 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, 1); 3488 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 1 : 16);
2058 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, 16); 3489 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 5);
2059 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, 2); 3490 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 2 : 16);
2060 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, 16); 3491 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 16 : 4);
2061 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, 3); 3492 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 3 : 16);
2062 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, 16); 3493 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 3);
2063 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, 4); 3494 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 4 : 16);
2064 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, 16); 3495 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 2);
2065 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, 5); 3496 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 5 : 16);
2066 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, 16); 3497 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 1);
2067 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, 6); 3498 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 6 : 16);
2068 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, 16); 3499 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 : 0);
2069 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, 7); 3500 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 7 : 16);
2070 3501
2071 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v))); 3502 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2072 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask)); 3503 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
2073 DONE; 3504 DONE;
2074 }") 3505 }")
2075 3506
2076 (define_expand "vec_unpacku_hi_v8hi" 3507 (define_expand "vec_unpacku_hi_v8hi"
2081 " 3512 "
2082 { 3513 {
2083 rtx vzero = gen_reg_rtx (V4SImode); 3514 rtx vzero = gen_reg_rtx (V4SImode);
2084 rtx mask = gen_reg_rtx (V16QImode); 3515 rtx mask = gen_reg_rtx (V16QImode);
2085 rtvec v = rtvec_alloc (16); 3516 rtvec v = rtvec_alloc (16);
3517 bool be = BYTES_BIG_ENDIAN;
2086 3518
2087 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx)); 3519 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2088 3520
2089 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, 16); 3521 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 7);
2090 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, 17); 3522 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 17 : 6);
2091 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, 0); 3523 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 0 : 17);
2092 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, 1); 3524 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 1 : 16);
2093 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, 16); 3525 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 5);
2094 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, 17); 3526 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 17 : 4);
2095 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, 2); 3527 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 2 : 17);
2096 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, 3); 3528 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 3 : 16);
2097 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, 16); 3529 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 3);
2098 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, 17); 3530 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 17 : 2);
2099 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, 4); 3531 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 4 : 17);
2100 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, 5); 3532 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 5 : 16);
2101 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, 16); 3533 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 1);
2102 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, 17); 3534 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 : 0);
2103 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, 6); 3535 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 6 : 17);
2104 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, 7); 3536 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 7 : 16);
2105 3537
2106 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v))); 3538 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2107 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask)); 3539 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
2108 DONE; 3540 DONE;
2109 }") 3541 }")
2110 3542
2111 (define_expand "vec_unpacku_lo_v16qi" 3543 (define_expand "vec_unpacku_lo_v16qi"
2116 " 3548 "
2117 { 3549 {
2118 rtx vzero = gen_reg_rtx (V8HImode); 3550 rtx vzero = gen_reg_rtx (V8HImode);
2119 rtx mask = gen_reg_rtx (V16QImode); 3551 rtx mask = gen_reg_rtx (V16QImode);
2120 rtvec v = rtvec_alloc (16); 3552 rtvec v = rtvec_alloc (16);
3553 bool be = BYTES_BIG_ENDIAN;
2121 3554
2122 emit_insn (gen_altivec_vspltish (vzero, const0_rtx)); 3555 emit_insn (gen_altivec_vspltish (vzero, const0_rtx));
2123 3556
2124 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, 16); 3557 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
2125 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, 8); 3558 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 8 : 16);
2126 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, 16); 3559 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 16 : 14);
2127 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, 9); 3560 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 9 : 16);
2128 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, 16); 3561 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
2129 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, 10); 3562 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 10 : 16);
2130 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, 16); 3563 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 16 : 12);
2131 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, 11); 3564 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
2132 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, 16); 3565 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
2133 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, 12); 3566 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 12 : 16);
2134 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, 16); 3567 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 10);
2135 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, 13); 3568 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
2136 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, 16); 3569 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 9);
2137 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, 14); 3570 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 14 : 16);
2138 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, 16); 3571 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 : 8);
2139 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, 15); 3572 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);
2140 3573
2141 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v))); 3574 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2142 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask)); 3575 emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
2143 DONE; 3576 DONE;
2144 }") 3577 }")
2145 3578
2146 (define_expand "vec_unpacku_lo_v8hi" 3579 (define_expand "vec_unpacku_lo_v8hi"
2151 " 3584 "
2152 { 3585 {
2153 rtx vzero = gen_reg_rtx (V4SImode); 3586 rtx vzero = gen_reg_rtx (V4SImode);
2154 rtx mask = gen_reg_rtx (V16QImode); 3587 rtx mask = gen_reg_rtx (V16QImode);
2155 rtvec v = rtvec_alloc (16); 3588 rtvec v = rtvec_alloc (16);
3589 bool be = BYTES_BIG_ENDIAN;
2156 3590
2157 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx)); 3591 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
2158 3592
2159 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, 16); 3593 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
2160 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, 17); 3594 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, be ? 17 : 14);
2161 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, 8); 3595 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, be ? 8 : 17);
2162 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, 9); 3596 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, be ? 9 : 16);
2163 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, 16); 3597 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
2164 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, 17); 3598 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, be ? 17 : 12);
2165 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, 10); 3599 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, be ? 10 : 17);
2166 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, 11); 3600 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
2167 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, 16); 3601 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
2168 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, 17); 3602 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, be ? 17 : 10);
2169 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, 12); 3603 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 12 : 17);
2170 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, 13); 3604 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
2171 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, 16); 3605 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 : 9);
2172 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, 17); 3606 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 : 8);
2173 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, 14); 3607 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 14 : 17);
2174 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, 15); 3608 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);
2175 3609
2176 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v))); 3610 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2177 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask)); 3611 emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
2178 DONE; 3612 DONE;
2179 }") 3613 }")
2180 3614
2181 (define_expand "vec_widen_umult_hi_v16qi" 3615 (define_expand "vec_widen_umult_hi_v16qi"
2187 " 3621 "
2188 { 3622 {
2189 rtx ve = gen_reg_rtx (V8HImode); 3623 rtx ve = gen_reg_rtx (V8HImode);
2190 rtx vo = gen_reg_rtx (V8HImode); 3624 rtx vo = gen_reg_rtx (V8HImode);
2191 3625
2192 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2])); 3626 if (BYTES_BIG_ENDIAN)
2193 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2])); 3627 {
2194 emit_insn (gen_altivec_vmrghh (operands[0], ve, vo)); 3628 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3629 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3630 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
3631 }
3632 else
3633 {
3634 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3635 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3636 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
3637 }
2195 DONE; 3638 DONE;
2196 }") 3639 }")
2197 3640
2198 (define_expand "vec_widen_umult_lo_v16qi" 3641 (define_expand "vec_widen_umult_lo_v16qi"
2199 [(set (match_operand:V8HI 0 "register_operand" "=v") 3642 [(set (match_operand:V8HI 0 "register_operand" "=v")
2204 " 3647 "
2205 { 3648 {
2206 rtx ve = gen_reg_rtx (V8HImode); 3649 rtx ve = gen_reg_rtx (V8HImode);
2207 rtx vo = gen_reg_rtx (V8HImode); 3650 rtx vo = gen_reg_rtx (V8HImode);
2208 3651
2209 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2])); 3652 if (BYTES_BIG_ENDIAN)
2210 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2])); 3653 {
2211 emit_insn (gen_altivec_vmrglh (operands[0], ve, vo)); 3654 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3655 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3656 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
3657 }
3658 else
3659 {
3660 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3661 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3662 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
3663 }
2212 DONE; 3664 DONE;
2213 }") 3665 }")
2214 3666
2215 (define_expand "vec_widen_smult_hi_v16qi" 3667 (define_expand "vec_widen_smult_hi_v16qi"
2216 [(set (match_operand:V8HI 0 "register_operand" "=v") 3668 [(set (match_operand:V8HI 0 "register_operand" "=v")
2221 " 3673 "
2222 { 3674 {
2223 rtx ve = gen_reg_rtx (V8HImode); 3675 rtx ve = gen_reg_rtx (V8HImode);
2224 rtx vo = gen_reg_rtx (V8HImode); 3676 rtx vo = gen_reg_rtx (V8HImode);
2225 3677
2226 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2])); 3678 if (BYTES_BIG_ENDIAN)
2227 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2])); 3679 {
2228 emit_insn (gen_altivec_vmrghh (operands[0], ve, vo)); 3680 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3681 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3682 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
3683 }
3684 else
3685 {
3686 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3687 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3688 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
3689 }
2229 DONE; 3690 DONE;
2230 }") 3691 }")
2231 3692
2232 (define_expand "vec_widen_smult_lo_v16qi" 3693 (define_expand "vec_widen_smult_lo_v16qi"
2233 [(set (match_operand:V8HI 0 "register_operand" "=v") 3694 [(set (match_operand:V8HI 0 "register_operand" "=v")
2238 " 3699 "
2239 { 3700 {
2240 rtx ve = gen_reg_rtx (V8HImode); 3701 rtx ve = gen_reg_rtx (V8HImode);
2241 rtx vo = gen_reg_rtx (V8HImode); 3702 rtx vo = gen_reg_rtx (V8HImode);
2242 3703
2243 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2])); 3704 if (BYTES_BIG_ENDIAN)
2244 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2])); 3705 {
2245 emit_insn (gen_altivec_vmrglh (operands[0], ve, vo)); 3706 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3707 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3708 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
3709 }
3710 else
3711 {
3712 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3713 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3714 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
3715 }
2246 DONE; 3716 DONE;
2247 }") 3717 }")
2248 3718
2249 (define_expand "vec_widen_umult_hi_v8hi" 3719 (define_expand "vec_widen_umult_hi_v8hi"
2250 [(set (match_operand:V4SI 0 "register_operand" "=v") 3720 [(set (match_operand:V4SI 0 "register_operand" "=v")
2255 " 3725 "
2256 { 3726 {
2257 rtx ve = gen_reg_rtx (V4SImode); 3727 rtx ve = gen_reg_rtx (V4SImode);
2258 rtx vo = gen_reg_rtx (V4SImode); 3728 rtx vo = gen_reg_rtx (V4SImode);
2259 3729
2260 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2])); 3730 if (BYTES_BIG_ENDIAN)
2261 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2])); 3731 {
2262 emit_insn (gen_altivec_vmrghw (operands[0], ve, vo)); 3732 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3733 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3734 emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
3735 }
3736 else
3737 {
3738 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3739 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3740 emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
3741 }
2263 DONE; 3742 DONE;
2264 }") 3743 }")
2265 3744
2266 (define_expand "vec_widen_umult_lo_v8hi" 3745 (define_expand "vec_widen_umult_lo_v8hi"
2267 [(set (match_operand:V4SI 0 "register_operand" "=v") 3746 [(set (match_operand:V4SI 0 "register_operand" "=v")
2272 " 3751 "
2273 { 3752 {
2274 rtx ve = gen_reg_rtx (V4SImode); 3753 rtx ve = gen_reg_rtx (V4SImode);
2275 rtx vo = gen_reg_rtx (V4SImode); 3754 rtx vo = gen_reg_rtx (V4SImode);
2276 3755
2277 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2])); 3756 if (BYTES_BIG_ENDIAN)
2278 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2])); 3757 {
2279 emit_insn (gen_altivec_vmrglw (operands[0], ve, vo)); 3758 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3759 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3760 emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
3761 }
3762 else
3763 {
3764 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3765 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3766 emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
3767 }
2280 DONE; 3768 DONE;
2281 }") 3769 }")
2282 3770
2283 (define_expand "vec_widen_smult_hi_v8hi" 3771 (define_expand "vec_widen_smult_hi_v8hi"
2284 [(set (match_operand:V4SI 0 "register_operand" "=v") 3772 [(set (match_operand:V4SI 0 "register_operand" "=v")
2289 " 3777 "
2290 { 3778 {
2291 rtx ve = gen_reg_rtx (V4SImode); 3779 rtx ve = gen_reg_rtx (V4SImode);
2292 rtx vo = gen_reg_rtx (V4SImode); 3780 rtx vo = gen_reg_rtx (V4SImode);
2293 3781
2294 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2])); 3782 if (BYTES_BIG_ENDIAN)
2295 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2])); 3783 {
2296 emit_insn (gen_altivec_vmrghw (operands[0], ve, vo)); 3784 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3785 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3786 emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
3787 }
3788 else
3789 {
3790 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3791 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3792 emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
3793 }
2297 DONE; 3794 DONE;
2298 }") 3795 }")
2299 3796
2300 (define_expand "vec_widen_smult_lo_v8hi" 3797 (define_expand "vec_widen_smult_lo_v8hi"
2301 [(set (match_operand:V4SI 0 "register_operand" "=v") 3798 [(set (match_operand:V4SI 0 "register_operand" "=v")
2306 " 3803 "
2307 { 3804 {
2308 rtx ve = gen_reg_rtx (V4SImode); 3805 rtx ve = gen_reg_rtx (V4SImode);
2309 rtx vo = gen_reg_rtx (V4SImode); 3806 rtx vo = gen_reg_rtx (V4SImode);
2310 3807
2311 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2])); 3808 if (BYTES_BIG_ENDIAN)
2312 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2])); 3809 {
2313 emit_insn (gen_altivec_vmrglw (operands[0], ve, vo)); 3810 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3811 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3812 emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
3813 }
3814 else
3815 {
3816 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3817 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3818 emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
3819 }
2314 DONE; 3820 DONE;
2315 }") 3821 }")
2316 3822
2317 (define_expand "vec_pack_trunc_v8hi" 3823 (define_expand "vec_pack_trunc_<mode>"
3824 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
3825 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
3826 (match_operand:VP 2 "register_operand" "v")]
3827 UNSPEC_VPACK_UNS_UNS_MOD))]
3828 "<VI_unit>"
3829 "")
3830
3831 (define_expand "mulv16qi3"
2318 [(set (match_operand:V16QI 0 "register_operand" "=v") 3832 [(set (match_operand:V16QI 0 "register_operand" "=v")
2319 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "v") 3833 (mult:V16QI (match_operand:V16QI 1 "register_operand" "v")
2320 (match_operand:V8HI 2 "register_operand" "v")] 3834 (match_operand:V16QI 2 "register_operand" "v")))]
2321 UNSPEC_VPKUHUM))]
2322 "TARGET_ALTIVEC" 3835 "TARGET_ALTIVEC"
2323 " 3836 "
2324 { 3837 {
2325 emit_insn (gen_altivec_vpkuhum (operands[0], operands[1], operands[2])); 3838 rtx even = gen_reg_rtx (V8HImode);
2326 DONE; 3839 rtx odd = gen_reg_rtx (V8HImode);
2327 }") 3840 rtx mask = gen_reg_rtx (V16QImode);
2328 3841 rtvec v = rtvec_alloc (16);
2329 (define_expand "vec_pack_trunc_v4si" 3842 int i;
2330 [(set (match_operand:V8HI 0 "register_operand" "=v") 3843
2331 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v") 3844 for (i = 0; i < 8; ++i) {
2332 (match_operand:V4SI 2 "register_operand" "v")] 3845 RTVEC_ELT (v, 2 * i)
2333 UNSPEC_VPKUWUM))] 3846 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 1 : 31 - 2 * i);
2334 "TARGET_ALTIVEC" 3847 RTVEC_ELT (v, 2 * i + 1)
2335 " 3848 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 17 : 15 - 2 * i);
2336 { 3849 }
2337 emit_insn (gen_altivec_vpkuwum (operands[0], operands[1], operands[2])); 3850
3851 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3852 emit_insn (gen_altivec_vmulesb (even, operands[1], operands[2]));
3853 emit_insn (gen_altivec_vmulosb (odd, operands[1], operands[2]));
3854 emit_insn (gen_altivec_vperm_v8hiv16qi (operands[0], even, odd, mask));
2338 DONE; 3855 DONE;
2339 }") 3856 }")
2340 3857
2341 (define_expand "altivec_negv4sf2" 3858 (define_expand "altivec_negv4sf2"
2342 [(use (match_operand:V4SF 0 "register_operand" "")) 3859 [(use (match_operand:V4SF 0 "register_operand" ""))
2356 gen_lowpart (V4SFmode, neg0), operands[1])); 3873 gen_lowpart (V4SFmode, neg0), operands[1]));
2357 3874
2358 DONE; 3875 DONE;
2359 }") 3876 }")
2360 3877
3878 ;; Vector reverse elements
3879 (define_expand "altivec_vreve<mode>2"
3880 [(set (match_operand:VEC_A 0 "register_operand" "=v")
3881 (unspec:VEC_A [(match_operand:VEC_A 1 "register_operand" "v")]
3882 UNSPEC_VREVEV))]
3883 "TARGET_ALTIVEC"
3884 {
3885 int i, j, size, num_elements;
3886 rtvec v = rtvec_alloc (16);
3887 rtx mask = gen_reg_rtx (V16QImode);
3888
3889 size = GET_MODE_UNIT_SIZE (<MODE>mode);
3890 num_elements = GET_MODE_NUNITS (<MODE>mode);
3891
3892 for (j = 0; j < num_elements; j++)
3893 for (i = 0; i < size; i++)
3894 RTVEC_ELT (v, i + j * size)
3895 = GEN_INT (i + (num_elements - 1 - j) * size);
3896
3897 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3898 emit_insn (gen_altivec_vperm_<mode> (operands[0], operands[1],
3899 operands[1], mask));
3900 DONE;
3901 })
3902
2361 ;; Vector SIMD PEM v2.06c defines LVLX, LVLXL, LVRX, LVRXL, 3903 ;; Vector SIMD PEM v2.06c defines LVLX, LVLXL, LVRX, LVRXL,
2362 ;; STVLX, STVLXL, STVVRX, STVRXL are available only on Cell. 3904 ;; STVLX, STVLXL, STVVRX, STVRXL are available only on Cell.
2363 (define_insn "altivec_lvlx" 3905 (define_insn "altivec_lvlx"
2364 [(set (match_operand:V16QI 0 "register_operand" "=v") 3906 [(set (match_operand:V16QI 0 "register_operand" "=v")
2365 (unspec:V16QI [(match_operand 1 "memory_operand" "Z")] 3907 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
2366 UNSPEC_LVLX))] 3908 UNSPEC_LVLX))]
2367 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL" 3909 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
2368 "lvlx %0,%y1" 3910 "lvlx %0,%y1"
2369 [(set_attr "type" "vecload")]) 3911 [(set_attr "type" "vecload")])
2370 3912
2371 (define_insn "altivec_lvlxl" 3913 (define_insn "altivec_lvlxl"
2372 [(set (match_operand:V16QI 0 "register_operand" "=v") 3914 [(set (match_operand:V16QI 0 "register_operand" "=v")
2373 (unspec:V16QI [(match_operand 1 "memory_operand" "Z")] 3915 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
2374 UNSPEC_LVLXL))] 3916 UNSPEC_LVLXL))]
2375 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL" 3917 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
2376 "lvlxl %0,%y1" 3918 "lvlxl %0,%y1"
2377 [(set_attr "type" "vecload")]) 3919 [(set_attr "type" "vecload")])
2378 3920
2379 (define_insn "altivec_lvrx" 3921 (define_insn "altivec_lvrx"
2380 [(set (match_operand:V16QI 0 "register_operand" "=v") 3922 [(set (match_operand:V16QI 0 "register_operand" "=v")
2381 (unspec:V16QI [(match_operand 1 "memory_operand" "Z")] 3923 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
2382 UNSPEC_LVRX))] 3924 UNSPEC_LVRX))]
2383 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL" 3925 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
2384 "lvrx %0,%y1" 3926 "lvrx %0,%y1"
2385 [(set_attr "type" "vecload")]) 3927 [(set_attr "type" "vecload")])
2386 3928
2387 (define_insn "altivec_lvrxl" 3929 (define_insn "altivec_lvrxl"
2388 [(set (match_operand:V16QI 0 "register_operand" "=v") 3930 [(set (match_operand:V16QI 0 "register_operand" "=v")
2389 (unspec:V16QI [(match_operand 1 "memory_operand" "Z")] 3931 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
2390 UNSPEC_LVRXL))] 3932 UNSPEC_LVRXL))]
2391 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL" 3933 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
2392 "lvrxl %0,%y1" 3934 "lvrxl %0,%y1"
2393 [(set_attr "type" "vecload")]) 3935 [(set_attr "type" "vecload")])
2394 3936
2395 (define_insn "altivec_stvlx" 3937 (define_insn "altivec_stvlx"
2396 [(parallel 3938 [(parallel
2397 [(set (match_operand:V4SI 0 "memory_operand" "=Z") 3939 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
2398 (match_operand:V4SI 1 "register_operand" "v")) 3940 (match_operand:V16QI 1 "register_operand" "v"))
2399 (unspec [(const_int 0)] UNSPEC_STVLX)])] 3941 (unspec [(const_int 0)] UNSPEC_STVLX)])]
2400 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL" 3942 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
2401 "stvlx %1,%y0" 3943 "stvlx %1,%y0"
2402 [(set_attr "type" "vecstore")]) 3944 [(set_attr "type" "vecstore")])
2403 3945
2404 (define_insn "altivec_stvlxl" 3946 (define_insn "altivec_stvlxl"
2405 [(parallel 3947 [(parallel
2406 [(set (match_operand:V4SI 0 "memory_operand" "=Z") 3948 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
2407 (match_operand:V4SI 1 "register_operand" "v")) 3949 (match_operand:V16QI 1 "register_operand" "v"))
2408 (unspec [(const_int 0)] UNSPEC_STVLXL)])] 3950 (unspec [(const_int 0)] UNSPEC_STVLXL)])]
2409 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL" 3951 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
2410 "stvlxl %1,%y0" 3952 "stvlxl %1,%y0"
2411 [(set_attr "type" "vecstore")]) 3953 [(set_attr "type" "vecstore")])
2412 3954
2413 (define_insn "altivec_stvrx" 3955 (define_insn "altivec_stvrx"
2414 [(parallel 3956 [(parallel
2415 [(set (match_operand:V4SI 0 "memory_operand" "=Z") 3957 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
2416 (match_operand:V4SI 1 "register_operand" "v")) 3958 (match_operand:V16QI 1 "register_operand" "v"))
2417 (unspec [(const_int 0)] UNSPEC_STVRX)])] 3959 (unspec [(const_int 0)] UNSPEC_STVRX)])]
2418 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL" 3960 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
2419 "stvrx %1,%y0" 3961 "stvrx %1,%y0"
2420 [(set_attr "type" "vecstore")]) 3962 [(set_attr "type" "vecstore")])
2421 3963
2422 (define_insn "altivec_stvrxl" 3964 (define_insn "altivec_stvrxl"
2423 [(parallel 3965 [(parallel
2424 [(set (match_operand:V4SI 0 "memory_operand" "=Z") 3966 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
2425 (match_operand:V4SI 1 "register_operand" "v")) 3967 (match_operand:V16QI 1 "register_operand" "v"))
2426 (unspec [(const_int 0)] UNSPEC_STVRXL)])] 3968 (unspec [(const_int 0)] UNSPEC_STVRXL)])]
2427 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL" 3969 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
2428 "stvrxl %1,%y0" 3970 "stvrxl %1,%y0"
2429 [(set_attr "type" "vecstore")]) 3971 [(set_attr "type" "vecstore")])
2430 3972
2431 (define_expand "vec_extract_evenv4si"
2432 [(set (match_operand:V4SI 0 "register_operand" "")
2433 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "")
2434 (match_operand:V4SI 2 "register_operand" "")]
2435 UNSPEC_EXTEVEN_V4SI))]
2436 "TARGET_ALTIVEC"
2437 "
2438 {
2439 rtx mask = gen_reg_rtx (V16QImode);
2440 rtvec v = rtvec_alloc (16);
2441
2442 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, 0);
2443 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, 1);
2444 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, 2);
2445 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, 3);
2446 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, 8);
2447 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, 9);
2448 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, 10);
2449 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, 11);
2450 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, 16);
2451 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, 17);
2452 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, 18);
2453 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, 19);
2454 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, 24);
2455 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, 25);
2456 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, 26);
2457 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, 27);
2458 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2459 emit_insn (gen_altivec_vperm_v4si (operands[0], operands[1], operands[2], mask));
2460
2461 DONE;
2462 }")
2463
2464 (define_expand "vec_extract_evenv4sf"
2465 [(set (match_operand:V4SF 0 "register_operand" "")
2466 (unspec:V8HI [(match_operand:V4SF 1 "register_operand" "")
2467 (match_operand:V4SF 2 "register_operand" "")]
2468 UNSPEC_EXTEVEN_V4SF))]
2469 "TARGET_ALTIVEC"
2470 "
2471 {
2472 rtx mask = gen_reg_rtx (V16QImode);
2473 rtvec v = rtvec_alloc (16);
2474
2475 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, 0);
2476 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, 1);
2477 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, 2);
2478 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, 3);
2479 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, 8);
2480 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, 9);
2481 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, 10);
2482 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, 11);
2483 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, 16);
2484 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, 17);
2485 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, 18);
2486 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, 19);
2487 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, 24);
2488 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, 25);
2489 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, 26);
2490 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, 27);
2491 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2492 emit_insn (gen_altivec_vperm_v4sf (operands[0], operands[1], operands[2], mask));
2493
2494 DONE;
2495 }")
2496
;; Extract the even-numbered V8HI elements of the two inputs via vperm.
;; Fix: operand 0 was declared with mode V4SI in this V8HI pattern; the
;; destination must be V8HI to match the pattern name and the inputs
;; (the template is discarded via DONE, but should still be consistent).
2497 (define_expand "vec_extract_evenv8hi"
2498 [(set (match_operand:V8HI 0 "register_operand" "")
2499 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "")
2500 (match_operand:V8HI 2 "register_operand" "")]
2501 UNSPEC_EXTEVEN_V8HI))]
2502 "TARGET_ALTIVEC"
2503 "
2504 {
2505 rtx mask = gen_reg_rtx (V16QImode);
2506 rtvec v = rtvec_alloc (16);
2507 
;; Byte pairs (0,1),(4,5),... select the even halfwords of operand 1;
;; indices 16+ select the even halfwords of operand 2.
2508 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, 0);
2509 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, 1);
2510 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, 4);
2511 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, 5);
2512 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, 8);
2513 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, 9);
2514 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, 12);
2515 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, 13);
2516 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, 16);
2517 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, 17);
2518 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, 20);
2519 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, 21);
2520 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, 24);
2521 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, 25);
2522 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, 28);
2523 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, 29);
2524 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2525 emit_insn (gen_altivec_vperm_v8hi (operands[0], operands[1], operands[2], mask));
2526 
2527 DONE;
2528 }")
2529
;; Extract the even-numbered V16QI elements of the two inputs via vperm.
;; Fix: operand 0 was declared V4SI and the unspec was V8HI; both must be
;; V16QI to match the pattern name and the inputs (template unused at
;; runtime because of DONE, but kept mode-consistent).
2530 (define_expand "vec_extract_evenv16qi"
2531 [(set (match_operand:V16QI 0 "register_operand" "")
2532 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "")
2533 (match_operand:V16QI 2 "register_operand" "")]
2534 UNSPEC_EXTEVEN_V16QI))]
2535 "TARGET_ALTIVEC"
2536 "
2537 {
2538 rtx mask = gen_reg_rtx (V16QImode);
2539 rtvec v = rtvec_alloc (16);
2540 
;; Even byte indices 0,2,...,14 pick operand 1's even bytes; 16,18,...,30
;; pick operand 2's even bytes.
2541 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, 0);
2542 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, 2);
2543 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, 4);
2544 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, 6);
2545 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, 8);
2546 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, 10);
2547 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, 12);
2548 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, 14);
2549 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, 16);
2550 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, 18);
2551 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, 20);
2552 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, 22);
2553 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, 24);
2554 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, 26);
2555 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, 28);
2556 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, 30);
2557 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2558 emit_insn (gen_altivec_vperm_v16qi (operands[0], operands[1], operands[2], mask));
2559 
2560 DONE;
2561 }")
2562
;; Extract the odd-numbered V4SI elements of the two inputs via vperm.
;; Fix: the unspec was written as unspec:V8HI; it must be V4SI to match
;; the destination (template discarded by DONE, kept consistent anyway).
2563 (define_expand "vec_extract_oddv4si"
2564 [(set (match_operand:V4SI 0 "register_operand" "")
2565 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "")
2566 (match_operand:V4SI 2 "register_operand" "")]
2567 UNSPEC_EXTODD_V4SI))]
2568 "TARGET_ALTIVEC"
2569 "
2570 {
2571 rtx mask = gen_reg_rtx (V16QImode);
2572 rtvec v = rtvec_alloc (16);
2573 
;; Bytes 4-7/12-15 select words 1 and 3 of operand 1; bytes 20-23/28-31
;; select words 1 and 3 of operand 2.
2574 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, 4);
2575 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, 5);
2576 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, 6);
2577 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, 7);
2578 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, 12);
2579 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, 13);
2580 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, 14);
2581 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, 15);
2582 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, 20);
2583 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, 21);
2584 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, 22);
2585 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, 23);
2586 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, 28);
2587 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, 29);
2588 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, 30);
2589 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, 31);
2590 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2591 emit_insn (gen_altivec_vperm_v4si (operands[0], operands[1], operands[2], mask));
2592 
2593 DONE;
2594 }")
2595
;; Extract the odd-numbered V4SF elements of the two inputs via vperm.
;; Fix: the unspec was written as unspec:V8HI; it must be V4SF to match
;; the destination (template discarded by DONE, kept consistent anyway).
2596 (define_expand "vec_extract_oddv4sf"
2597 [(set (match_operand:V4SF 0 "register_operand" "")
2598 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "")
2599 (match_operand:V4SF 2 "register_operand" "")]
2600 UNSPEC_EXTODD_V4SF))]
2601 "TARGET_ALTIVEC"
2602 "
2603 {
2604 rtx mask = gen_reg_rtx (V16QImode);
2605 rtvec v = rtvec_alloc (16);
2606 
;; Same selector as vec_extract_oddv4si: words 1,3 of each input.
2607 RTVEC_ELT (v, 0) = gen_rtx_CONST_INT (QImode, 4);
2608 RTVEC_ELT (v, 1) = gen_rtx_CONST_INT (QImode, 5);
2609 RTVEC_ELT (v, 2) = gen_rtx_CONST_INT (QImode, 6);
2610 RTVEC_ELT (v, 3) = gen_rtx_CONST_INT (QImode, 7);
2611 RTVEC_ELT (v, 4) = gen_rtx_CONST_INT (QImode, 12);
2612 RTVEC_ELT (v, 5) = gen_rtx_CONST_INT (QImode, 13);
2613 RTVEC_ELT (v, 6) = gen_rtx_CONST_INT (QImode, 14);
2614 RTVEC_ELT (v, 7) = gen_rtx_CONST_INT (QImode, 15);
2615 RTVEC_ELT (v, 8) = gen_rtx_CONST_INT (QImode, 20);
2616 RTVEC_ELT (v, 9) = gen_rtx_CONST_INT (QImode, 21);
2617 RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, 22);
2618 RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, 23);
2619 RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, 28);
2620 RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, 29);
2621 RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, 30);
2622 RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, 31);
2623 emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
2624 emit_insn (gen_altivec_vperm_v4sf (operands[0], operands[1], operands[2], mask));
2625 
2626 DONE;
2627 }")
2628
;; vpku[hw]um exposed with unmoded inputs so the vec_extract_odd expanders
;; below can reuse the same pack-modulo insn for either element width.
2629 (define_insn "vpkuhum_nomode"
2630 [(set (match_operand:V16QI 0 "register_operand" "=v")
2631 (unspec:V16QI [(match_operand 1 "register_operand" "v")
2632 (match_operand 2 "register_operand" "v")]
2633 UNSPEC_VPKUHUM))]
2634 "TARGET_ALTIVEC"
2635 "vpkuhum %0,%1,%2"
2636 [(set_attr "type" "vecperm")])
2637 
2638 (define_insn "vpkuwum_nomode"
2639 [(set (match_operand:V8HI 0 "register_operand" "=v")
2640 (unspec:V8HI [(match_operand 1 "register_operand" "v")
2641 (match_operand 2 "register_operand" "v")]
2642 UNSPEC_VPKUWUM))]
2643 "TARGET_ALTIVEC"
2644 "vpkuwum %0,%1,%2"
2645 [(set_attr "type" "vecperm")])
2646 
;; Odd halfword elements are the low halves of the words, so pack-word-
;; modulo extracts them directly; no vperm mask is needed.
2647 (define_expand "vec_extract_oddv8hi"
2648 [(set (match_operand:V8HI 0 "register_operand" "")
2649 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "")
2650 (match_operand:V8HI 2 "register_operand" "")]
2651 UNSPEC_EXTODD_V8HI))]
2652 "TARGET_ALTIVEC"
2653 "
2654 {
2655 emit_insn (gen_vpkuwum_nomode (operands[0], operands[1], operands[2]));
2656 DONE;
2657 }")
2658 
;; Likewise, odd bytes are the low halves of the halfwords.
2659 (define_expand "vec_extract_oddv16qi"
2660 [(set (match_operand:V16QI 0 "register_operand" "")
2661 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "")
2662 (match_operand:V16QI 2 "register_operand" "")]
2663 UNSPEC_EXTODD_V16QI))]
2664 "TARGET_ALTIVEC"
2665 "
2666 {
2667 emit_insn (gen_vpkuhum_nomode (operands[0], operands[1], operands[2]));
2668 DONE;
2669 }")
2670
;; Interleave the high-half elements of the two integer-vector inputs by
;; delegating to the vmrgh[bhw] merge insn for the VI element width.
;; NOTE(review): element numbering here follows big-endian AltiVec order;
;; confirm little-endian handling if this target supports it.
2671 (define_expand "vec_interleave_high<mode>"
2672 [(set (match_operand:VI 0 "register_operand" "")
2673 (unspec:VI [(match_operand:VI 1 "register_operand" "")
2674 (match_operand:VI 2 "register_operand" "")]
2675 UNSPEC_INTERHI))]
2676 "TARGET_ALTIVEC"
2677 "
2678 {
2679 emit_insn (gen_altivec_vmrgh<VI_char> (operands[0], operands[1], operands[2]));
2680 DONE;
2681 }")
2682 
;; Interleave the low-half elements via vmrgl[bhw].
2683 (define_expand "vec_interleave_low<mode>"
2684 [(set (match_operand:VI 0 "register_operand" "")
2685 (unspec:VI [(match_operand:VI 1 "register_operand" "")
2686 (match_operand:VI 2 "register_operand" "")]
2687 UNSPEC_INTERLO))]
2688 "TARGET_ALTIVEC"
2689 "
2690 {
2691 emit_insn (gen_altivec_vmrgl<VI_char> (operands[0], operands[1], operands[2]));
2692 DONE;
2693 }")
2694
2695 (define_expand "vec_unpacks_float_hi_v8hi" 3973 (define_expand "vec_unpacks_float_hi_v8hi"
2696 [(set (match_operand:V4SF 0 "register_operand" "") 3974 [(set (match_operand:V4SF 0 "register_operand" "")
2697 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")] 3975 (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
2698 UNSPEC_VUPKHS_V4SF))] 3976 UNSPEC_VUPKHS_V4SF))]
2699 "TARGET_ALTIVEC" 3977 "TARGET_ALTIVEC"
2745 4023
2746 emit_insn (gen_vec_unpacku_lo_v8hi (tmp, operands[1])); 4024 emit_insn (gen_vec_unpacku_lo_v8hi (tmp, operands[1]));
2747 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx)); 4025 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
2748 DONE; 4026 DONE;
2749 }") 4027 }")
4028
4029
4030 ;; Power8/power9 vector instructions encoded as Altivec instructions
4031
4032 ;; Vector count leading zeros
4033 (define_insn "*p8v_clz<mode>2"
4034 [(set (match_operand:VI2 0 "register_operand" "=v")
4035 (clz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4036 "TARGET_P8_VECTOR"
4037 "vclz<wd> %0,%1"
4038 [(set_attr "length" "4")
4039 (set_attr "type" "vecsimple")])
4040 
4041 ;; Vector absolute difference unsigned - expander form matched by builtins
4042 (define_expand "vadu<mode>3"
4043 [(set (match_operand:VI 0 "register_operand")
4044 (unspec:VI [(match_operand:VI 1 "register_operand")
4045 (match_operand:VI 2 "register_operand")]
4046 UNSPEC_VADU))]
4047 "TARGET_P9_VECTOR")
4048 
4049 ;; Vector absolute difference unsigned - the vabsdu[bhw] instruction itself
4050 (define_insn "*p9_vadu<mode>3"
4051 [(set (match_operand:VI 0 "register_operand" "=v")
4052 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
4053 (match_operand:VI 2 "register_operand" "v")]
4054 UNSPEC_VADU))]
4055 "TARGET_P9_VECTOR"
4056 "vabsdu<wd> %0,%1,%2"
4057 [(set_attr "type" "vecsimple")])
4058 
4059 ;; Vector count trailing zeros
4060 (define_insn "*p9v_ctz<mode>2"
4061 [(set (match_operand:VI2 0 "register_operand" "=v")
4062 (ctz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4063 "TARGET_P9_VECTOR"
4064 "vctz<wd> %0,%1"
4065 [(set_attr "length" "4")
4066 (set_attr "type" "vecsimple")])
4067 
4068 ;; Vector population count
4069 (define_insn "*p8v_popcount<mode>2"
4070 [(set (match_operand:VI2 0 "register_operand" "=v")
4071 (popcount:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4072 "TARGET_P8_VECTOR"
4073 "vpopcnt<wd> %0,%1"
4074 [(set_attr "length" "4")
4075 (set_attr "type" "vecsimple")])
4076 
4077 ;; Vector parity (per-element parity of the bytes)
4078 (define_insn "*p9v_parity<mode>2"
4079 [(set (match_operand:VParity 0 "register_operand" "=v")
4080 (parity:VParity (match_operand:VParity 1 "register_operand" "v")))]
4081 "TARGET_P9_VECTOR"
4082 "vprtyb<wd> %0,%1"
4083 [(set_attr "length" "4")
4084 (set_attr "type" "vecsimple")])
4085 
4086 ;; Vector Gather Bits by Bytes by Doubleword
4087 (define_insn "p8v_vgbbd"
4088 [(set (match_operand:V16QI 0 "register_operand" "=v")
4089 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
4090 UNSPEC_VGBBD))]
4091 "TARGET_P8_VECTOR"
4092 "vgbbd %0,%1"
4093 [(set_attr "length" "4")
4094 (set_attr "type" "vecsimple")])
4095
4096
4097 ;; 128-bit binary integer arithmetic
4098 ;; We have a special container type (V1TImode) to allow operations using the
4099 ;; ISA 2.07 128-bit binary support to target the VMX/altivec registers without
4100 ;; having to worry about the register allocator deciding GPRs are better.
4101 
;; Quadword add/subtract modulo 2**128.
4102 (define_insn "altivec_vadduqm"
4103 [(set (match_operand:V1TI 0 "register_operand" "=v")
4104 (plus:V1TI (match_operand:V1TI 1 "register_operand" "v")
4105 (match_operand:V1TI 2 "register_operand" "v")))]
4106 "TARGET_VADDUQM"
4107 "vadduqm %0,%1,%2"
4108 [(set_attr "length" "4")
4109 (set_attr "type" "vecsimple")])
4110 
;; Carry-out of a quadword add (unspec: no RTL equivalent).
4111 (define_insn "altivec_vaddcuq"
4112 [(set (match_operand:V1TI 0 "register_operand" "=v")
4113 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4114 (match_operand:V1TI 2 "register_operand" "v")]
4115 UNSPEC_VADDCUQ))]
4116 "TARGET_VADDUQM"
4117 "vaddcuq %0,%1,%2"
4118 [(set_attr "length" "4")
4119 (set_attr "type" "vecsimple")])
4120 
4121 (define_insn "altivec_vsubuqm"
4122 [(set (match_operand:V1TI 0 "register_operand" "=v")
4123 (minus:V1TI (match_operand:V1TI 1 "register_operand" "v")
4124 (match_operand:V1TI 2 "register_operand" "v")))]
4125 "TARGET_VADDUQM"
4126 "vsubuqm %0,%1,%2"
4127 [(set_attr "length" "4")
4128 (set_attr "type" "vecsimple")])
4129 
;; Carry-out (borrow indicator) of a quadword subtract.
4130 (define_insn "altivec_vsubcuq"
4131 [(set (match_operand:V1TI 0 "register_operand" "=v")
4132 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4133 (match_operand:V1TI 2 "register_operand" "v")]
4134 UNSPEC_VSUBCUQ))]
4135 "TARGET_VADDUQM"
4136 "vsubcuq %0,%1,%2"
4137 [(set_attr "length" "4")
4138 (set_attr "type" "vecsimple")])
4139 
;; Extended (carry-in) forms: operand 3 supplies the incoming carry.
4140 (define_insn "altivec_vaddeuqm"
4141 [(set (match_operand:V1TI 0 "register_operand" "=v")
4142 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4143 (match_operand:V1TI 2 "register_operand" "v")
4144 (match_operand:V1TI 3 "register_operand" "v")]
4145 UNSPEC_VADDEUQM))]
4146 "TARGET_VADDUQM"
4147 "vaddeuqm %0,%1,%2,%3"
4148 [(set_attr "length" "4")
4149 (set_attr "type" "vecsimple")])
4150 
4151 (define_insn "altivec_vaddecuq"
4152 [(set (match_operand:V1TI 0 "register_operand" "=v")
4153 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4154 (match_operand:V1TI 2 "register_operand" "v")
4155 (match_operand:V1TI 3 "register_operand" "v")]
4156 UNSPEC_VADDECUQ))]
4157 "TARGET_VADDUQM"
4158 "vaddecuq %0,%1,%2,%3"
4159 [(set_attr "length" "4")
4160 (set_attr "type" "vecsimple")])
4161 
4162 (define_insn "altivec_vsubeuqm"
4163 [(set (match_operand:V1TI 0 "register_operand" "=v")
4164 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4165 (match_operand:V1TI 2 "register_operand" "v")
4166 (match_operand:V1TI 3 "register_operand" "v")]
4167 UNSPEC_VSUBEUQM))]
4168 "TARGET_VADDUQM"
4169 "vsubeuqm %0,%1,%2,%3"
4170 [(set_attr "length" "4")
4171 (set_attr "type" "vecsimple")])
4172 
4173 (define_insn "altivec_vsubecuq"
4174 [(set (match_operand:V1TI 0 "register_operand" "=v")
4175 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4176 (match_operand:V1TI 2 "register_operand" "v")
4177 (match_operand:V1TI 3 "register_operand" "v")]
4178 UNSPEC_VSUBECUQ))]
4179 "TARGET_VADDUQM"
4180 "vsubecuq %0,%1,%2,%3"
4181 [(set_attr "length" "4")
4182 (set_attr "type" "vecsimple")])
4183
4184 ;; We use V2DI as the output type to simplify converting the permute
4185 ;; bits into an integer
4186 (define_insn "altivec_vbpermq"
4187 [(set (match_operand:V2DI 0 "register_operand" "=v")
4188 (unspec:V2DI [(match_operand:V16QI 1 "register_operand" "v")
4189 (match_operand:V16QI 2 "register_operand" "v")]
4190 UNSPEC_VBPERMQ))]
4191 "TARGET_P8_VECTOR"
4192 "vbpermq %0,%1,%2"
4193 [(set_attr "type" "vecperm")])
4194 
4195 ; One of the vector API interfaces requires returning vector unsigned char.
;; Same vbpermq instruction as above, but typed V16QI for that interface.
4196 (define_insn "altivec_vbpermq2"
4197 [(set (match_operand:V16QI 0 "register_operand" "=v")
4198 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
4199 (match_operand:V16QI 2 "register_operand" "v")]
4200 UNSPEC_VBPERMQ))]
4201 "TARGET_P8_VECTOR"
4202 "vbpermq %0,%1,%2"
4203 [(set_attr "type" "vecperm")])
4204 
;; Doubleword variant (ISA 3.0).
4205 (define_insn "altivec_vbpermd"
4206 [(set (match_operand:V2DI 0 "register_operand" "=v")
4207 (unspec:V2DI [(match_operand:V2DI 1 "register_operand" "v")
4208 (match_operand:V16QI 2 "register_operand" "v")]
4209 UNSPEC_VBPERMD))]
4210 "TARGET_P9_VECTOR"
4211 "vbpermd %0,%1,%2"
4212 [(set_attr "type" "vecsimple")])
4213
4214 ;; Decimal Integer operations
;; Iterator/attr pair so one pattern serves both bcdadd and bcdsub.
4215 (define_int_iterator UNSPEC_BCD_ADD_SUB [UNSPEC_BCDADD UNSPEC_BCDSUB])
4216 
4217 (define_int_attr bcd_add_sub [(UNSPEC_BCDADD "add")
4218 (UNSPEC_BCDSUB "sub")])
4219 
;; Condition codes exposed by the bcdadd./bcdsub. comparison expanders.
4220 (define_code_iterator BCD_TEST [eq lt gt unordered])
4221 
;; BCD add/subtract; operand 3 is the PS (preferred sign) bit.  The dot-form
;; instruction always sets CR6, hence the clobber.
4222 (define_insn "bcd<bcd_add_sub>"
4223 [(set (match_operand:V1TI 0 "gpc_reg_operand" "=v")
4224 (unspec:V1TI [(match_operand:V1TI 1 "gpc_reg_operand" "v")
4225 (match_operand:V1TI 2 "gpc_reg_operand" "v")
4226 (match_operand:QI 3 "const_0_to_1_operand" "n")]
4227 UNSPEC_BCD_ADD_SUB))
4228 (clobber (reg:CCFP CR6_REGNO))]
4229 "TARGET_P8_VECTOR"
4230 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4231 [(set_attr "length" "4")
4232 (set_attr "type" "vecsimple")])
4233
4234 ;; Use a floating point type (V2DFmode) for the compare to set CR6 so that we
4235 ;; can use the unordered test for BCD nans and add/subtracts that overflow. An
4236 ;; UNORDERED test on an integer type (like V1TImode) is not defined. The type
4237 ;; probably should be one that can go in the VMX (Altivec) registers, so we
4238 ;; can't use DDmode or DFmode.
;; Test-only form: the arithmetic result is discarded (scratch operand 0).
4239 (define_insn "*bcd<bcd_add_sub>_test"
4240 [(set (reg:CCFP CR6_REGNO)
4241 (compare:CCFP
4242 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "v")
4243 (match_operand:V1TI 2 "register_operand" "v")
4244 (match_operand:QI 3 "const_0_to_1_operand" "i")]
4245 UNSPEC_BCD_ADD_SUB)
4246 (match_operand:V2DF 4 "zero_constant" "j")))
4247 (clobber (match_scratch:V1TI 0 "=v"))]
4248 "TARGET_P8_VECTOR"
4249 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4250 [(set_attr "length" "4")
4251 (set_attr "type" "vecsimple")])
4252 
;; Combined form: keep the arithmetic result AND the CR6 comparison; this is
;; the shape the peephole2 at the end of the file produces.
4253 (define_insn "*bcd<bcd_add_sub>_test2"
4254 [(set (match_operand:V1TI 0 "register_operand" "=v")
4255 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4256 (match_operand:V1TI 2 "register_operand" "v")
4257 (match_operand:QI 3 "const_0_to_1_operand" "i")]
4258 UNSPEC_BCD_ADD_SUB))
4259 (set (reg:CCFP CR6_REGNO)
4260 (compare:CCFP
4261 (unspec:V2DF [(match_dup 1)
4262 (match_dup 2)
4263 (match_dup 3)]
4264 UNSPEC_BCD_ADD_SUB)
4265 (match_operand:V2DF 4 "zero_constant" "j")))]
4266 "TARGET_P8_VECTOR"
4267 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4268 [(set_attr "length" "4")
4269 (set_attr "type" "vecsimple")])
4270
;; ISA 3.0 deliver-a-random-number: darn L field selects the form
;; (0 = 32-bit conditioned, 1 = 64-bit conditioned, 2 = 64-bit raw).
4271 (define_insn "darn_32"
4272 [(set (match_operand:SI 0 "register_operand" "=r")
4273 (unspec:SI [(const_int 0)] UNSPEC_DARN_32))]
4274 "TARGET_P9_MISC"
4275 "darn %0,0"
4276 [(set_attr "type" "integer")])
4277 
4278 (define_insn "darn_raw"
4279 [(set (match_operand:DI 0 "register_operand" "=r")
4280 (unspec:DI [(const_int 0)] UNSPEC_DARN_RAW))]
4281 "TARGET_P9_MISC && TARGET_64BIT"
4282 "darn %0,2"
4283 [(set_attr "type" "integer")])
4284 
4285 (define_insn "darn"
4286 [(set (match_operand:DI 0 "register_operand" "=r")
4287 (unspec:DI [(const_int 0)] UNSPEC_DARN))]
4288 "TARGET_P9_MISC && TARGET_64BIT"
4289 "darn %0,1"
4290 [(set_attr "type" "integer")])
4291
4292 ;; Test byte within range.
4293 ;;
4294 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
4295 ;; represents a byte whose value is ignored in this context and
4296 ;; vv, the least significant byte, holds the byte value that is to
4297 ;; be tested for membership within the range specified by operand 2.
4298 ;; The bytes of operand 2 are organized as xx:xx:hi:lo.
4299 ;;
4300 ;; Return in target register operand 0 a value of 1 if lo <= vv and
4301 ;; vv <= hi. Otherwise, set register operand 0 to 0.
4302 ;;
4303 ;; Though the instructions to which this expansion maps operate on
4304 ;; 64-bit registers, the current implementation only operates on
4305 ;; SI-mode operands as the high-order bits provide no information
4306 ;; that is not already available in the low-order bits. To avoid the
4307 ;; costs of data widening operations, future enhancements might allow
4308 ;; DI mode for operand 0 and/or might allow operand 1 to be QI mode.
;; The first set matches *cmprb_internal (cmprb), the second setb_signed.
4309 (define_expand "cmprb"
4310 [(set (match_dup 3)
4311 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
4312 (match_operand:SI 2 "gpc_reg_operand" "r")]
4313 UNSPEC_CMPRB))
4314 (set (match_operand:SI 0 "gpc_reg_operand" "=r")
4315 (if_then_else:SI (lt (match_dup 3)
4316 (const_int 0))
4317 (const_int -1)
4318 (if_then_else (gt (match_dup 3)
4319 (const_int 0))
4320 (const_int 1)
4321 (const_int 0))))]
4322 "TARGET_P9_MISC"
4323 {
4324 operands[3] = gen_reg_rtx (CCmode);
4325 })
4326
4327 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
4328 ;; represents a byte whose value is ignored in this context and
4329 ;; vv, the least significant byte, holds the byte value that is to
4330 ;; be tested for membership within the range specified by operand 2.
4331 ;; The bytes of operand 2 are organized as xx:xx:hi:lo.
4332 ;;
4333 ;; Set bit 1 (the GT bit, 0x4) of CR register operand 0 to 1 if
4334 ;; lo <= vv and vv <= hi. Otherwise, set the GT bit to 0. The other
4335 ;; 3 bits of the target CR register are all set to 0.
;; Single-range form: L field of cmprb is 0.
4336 (define_insn "*cmprb_internal"
4337 [(set (match_operand:CC 0 "cc_reg_operand" "=y")
4338 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
4339 (match_operand:SI 2 "gpc_reg_operand" "r")]
4340 UNSPEC_CMPRB))]
4341 "TARGET_P9_MISC"
4342 "cmprb %0,0,%1,%2"
4343 [(set_attr "type" "logical")])
4344
4345 ;; Set operand 0 register to -1 if the LT bit (0x8) of condition
4346 ;; register operand 1 is on. Otherwise, set operand 0 register to 1
4347 ;; if the GT bit (0x4) of condition register operand 1 is on.
4348 ;; Otherwise, set operand 0 to 0. Note that the result stored into
4349 ;; register operand 0 is non-zero iff either the LT or GT bits are on
4350 ;; within condition register operand 1.
4351 (define_insn "setb_signed"
4352 [(set (match_operand:SI 0 "gpc_reg_operand" "=r")
4353 (if_then_else:SI (lt (match_operand:CC 1 "cc_reg_operand" "y")
4354 (const_int 0))
4355 (const_int -1)
4356 (if_then_else (gt (match_dup 1)
4357 (const_int 0))
4358 (const_int 1)
4359 (const_int 0))))]
4360 "TARGET_P9_MISC"
4361 "setb %0,%1"
4362 [(set_attr "type" "logical")])
4363 
;; Same as setb_signed but keyed on an unsigned (CCUNS) comparison result.
4364 (define_insn "setb_unsigned"
4365 [(set (match_operand:SI 0 "gpc_reg_operand" "=r")
4366 (if_then_else:SI (ltu (match_operand:CCUNS 1 "cc_reg_operand" "y")
4367 (const_int 0))
4368 (const_int -1)
4369 (if_then_else (gtu (match_dup 1)
4370 (const_int 0))
4371 (const_int 1)
4372 (const_int 0))))]
4373 "TARGET_P9_MISC"
4374 "setb %0,%1"
4375 [(set_attr "type" "logical")])
4376
4377 ;; Test byte within two ranges.
4378 ;;
4379 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
4380 ;; represents a byte whose value is ignored in this context and
4381 ;; vv, the least significant byte, holds the byte value that is to
4382 ;; be tested for membership within the range specified by operand 2.
4383 ;; The bytes of operand 2 are organized as hi_1:lo_1:hi_2:lo_2.
4384 ;;
4385 ;; Return in target register operand 0 a value of 1 if (lo_1 <= vv and
4386 ;; vv <= hi_1) or if (lo_2 <= vv and vv <= hi_2). Otherwise, set register
4387 ;; operand 0 to 0.
4388 ;;
4389 ;; Though the instructions to which this expansion maps operate on
4390 ;; 64-bit registers, the current implementation only operates on
4391 ;; SI-mode operands as the high-order bits provide no information
4392 ;; that is not already available in the low-order bits. To avoid the
4393 ;; costs of data widening operations, future enhancements might allow
4394 ;; DI mode for operand 0 and/or might allow operand 1 to be QI mode.
;; Mirrors the cmprb expander above, but with the two-range UNSPEC_CMPRB2.
4395 (define_expand "cmprb2"
4396 [(set (match_dup 3)
4397 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
4398 (match_operand:SI 2 "gpc_reg_operand" "r")]
4399 UNSPEC_CMPRB2))
4400 (set (match_operand:SI 0 "gpc_reg_operand" "=r")
4401 (if_then_else:SI (lt (match_dup 3)
4402 (const_int 0))
4403 (const_int -1)
4404 (if_then_else (gt (match_dup 3)
4405 (const_int 0))
4406 (const_int 1)
4407 (const_int 0))))]
4408 "TARGET_P9_MISC"
4409 {
4410 operands[3] = gen_reg_rtx (CCmode);
4411 })
4412
4413 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
4414 ;; represents a byte whose value is ignored in this context and
4415 ;; vv, the least significant byte, holds the byte value that is to
4416 ;; be tested for membership within the ranges specified by operand 2.
4417 ;; The bytes of operand 2 are organized as hi_1:lo_1:hi_2:lo_2.
4418 ;;
4419 ;; Set bit 1 (the GT bit, 0x4) of CR register operand 0 to 1 if
4420 ;; (lo_1 <= vv and vv <= hi_1) or if (lo_2 <= vv and vv <= hi_2).
4421 ;; Otherwise, set the GT bit to 0. The other 3 bits of the target
4422 ;; CR register are all set to 0.
;; Two-range form: L field of cmprb is 1.
4423 (define_insn "*cmprb2_internal"
4424 [(set (match_operand:CC 0 "cc_reg_operand" "=y")
4425 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
4426 (match_operand:SI 2 "gpc_reg_operand" "r")]
4427 UNSPEC_CMPRB2))]
4428 "TARGET_P9_MISC"
4429 "cmprb %0,1,%1,%2"
4430 [(set_attr "type" "logical")])
4431
4432 ;; Test byte membership within set of 8 bytes.
4433 ;;
4434 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
4435 ;; represents a byte whose value is ignored in this context and
4436 ;; vv, the least significant byte, holds the byte value that is to
4437 ;; be tested for membership within the set specified by operand 2.
4438 ;; The bytes of operand 2 are organized as e0:e1:e2:e3:e4:e5:e6:e7.
4439 ;;
4440 ;; Return in target register operand 0 a value of 1 if vv equals one
4441 ;; of the values e0, e1, e2, e3, e4, e5, e6, or e7. Otherwise, set
4442 ;; register operand 0 to 0. Note that the 8 byte values held within
4443 ;; operand 2 need not be unique.
4444 ;;
4445 ;; Though the instructions to which this expansion maps operate on
4446 ;; 64-bit registers, the current implementation requires that operands
4447 ;; 0 and 1 have mode SI as the high-order bits provide no information
4448 ;; that is not already available in the low-order bits. To avoid the
4449 ;; costs of data widening operations, future enhancements might allow
4450 ;; DI mode for operand 0 and/or might allow operand 1 to be QI mode.
;; Operand 2 is DImode (the 8-byte set), hence the TARGET_64BIT condition.
4451 (define_expand "cmpeqb"
4452 [(set (match_dup 3)
4453 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
4454 (match_operand:DI 2 "gpc_reg_operand" "r")]
4455 UNSPEC_CMPEQB))
4456 (set (match_operand:SI 0 "gpc_reg_operand" "=r")
4457 (if_then_else:SI (lt (match_dup 3)
4458 (const_int 0))
4459 (const_int -1)
4460 (if_then_else (gt (match_dup 3)
4461 (const_int 0))
4462 (const_int 1)
4463 (const_int 0))))]
4464 "TARGET_P9_MISC && TARGET_64BIT"
4465 {
4466 operands[3] = gen_reg_rtx (CCmode);
4467 })
4468
4469 ;; The bytes of operand 1 are organized as xx:xx:xx:vv, where xx
4470 ;; represents a byte whose value is ignored in this context and
4471 ;; vv, the least significant byte, holds the byte value that is to
4472 ;; be tested for membership within the set specified by operand 2.
4473 ;; The bytes of operand 2 are organized as e0:e1:e2:e3:e4:e5:e6:e7.
4474 ;;
4475 ;; Set bit 1 (the GT bit, 0x4) of CR register operand 0 to 1 if vv
4476 ;; equals one of the values e0, e1, e2, e3, e4, e5, e6, or e7. Otherwise,
4477 ;; set the GT bit to zero. The other 3 bits of the target CR register
4478 ;; are all set to 0.
;; Matched by the first set of the cmpeqb expander above.
4479 (define_insn "*cmpeqb_internal"
4480 [(set (match_operand:CC 0 "cc_reg_operand" "=y")
4481 (unspec:CC [(match_operand:SI 1 "gpc_reg_operand" "r")
4482 (match_operand:DI 2 "gpc_reg_operand" "r")]
4483 UNSPEC_CMPEQB))]
4484 "TARGET_P9_MISC && TARGET_64BIT"
4485 "cmpeqb %0,%1,%2"
4486 [(set_attr "type" "logical")])
4487
;; Expand a BCD add/sub comparison builtin: run the dot-form instruction to
;; set CR6 (result discarded via the scratch), then materialize the chosen
;; BCD_TEST condition (eq/lt/gt/unordered) on CR6 into operand 0.
4488 (define_expand "bcd<bcd_add_sub>_<code>"
4489 [(parallel [(set (reg:CCFP CR6_REGNO)
4490 (compare:CCFP
4491 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "")
4492 (match_operand:V1TI 2 "register_operand" "")
4493 (match_operand:QI 3 "const_0_to_1_operand" "")]
4494 UNSPEC_BCD_ADD_SUB)
4495 (match_dup 4)))
4496 (clobber (match_scratch:V1TI 5 ""))])
4497 (set (match_operand:SI 0 "register_operand" "")
4498 (BCD_TEST:SI (reg:CCFP CR6_REGNO)
4499 (const_int 0)))]
4500 "TARGET_P8_VECTOR"
4501 {
4502 operands[4] = CONST0_RTX (V2DFmode);
4503 })
4504
4505 ;; Peephole2 pattern to combine a bcdadd/bcdsub that calculates the value and
4506 ;; the bcdadd/bcdsub that tests the value. The combiner won't work since
4507 ;; CR6 is a hard coded register. Unfortunately, all of the Altivec predicate
4508 ;; support is hard coded to use the fixed register CR6 instead of creating
4509 ;; a register class for CR6.
4510 
;; Replaces the back-to-back bcd<op>. pair (value form then test form on the
;; same inputs) with the single combined *bcd<bcd_add_sub>_test2 pattern.
4511 (define_peephole2
4512 [(parallel [(set (match_operand:V1TI 0 "register_operand" "")
4513 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "")
4514 (match_operand:V1TI 2 "register_operand" "")
4515 (match_operand:QI 3 "const_0_to_1_operand" "")]
4516 UNSPEC_BCD_ADD_SUB))
4517 (clobber (reg:CCFP CR6_REGNO))])
4518 (parallel [(set (reg:CCFP CR6_REGNO)
4519 (compare:CCFP
4520 (unspec:V2DF [(match_dup 1)
4521 (match_dup 2)
4522 (match_dup 3)]
4523 UNSPEC_BCD_ADD_SUB)
4524 (match_operand:V2DF 4 "zero_constant" "")))
4525 (clobber (match_operand:V1TI 5 "register_operand" ""))])]
4526 "TARGET_P8_VECTOR"
4527 [(parallel [(set (match_dup 0)
4528 (unspec:V1TI [(match_dup 1)
4529 (match_dup 2)
4530 (match_dup 3)]
4531 UNSPEC_BCD_ADD_SUB))
4532 (set (reg:CCFP CR6_REGNO)
4533 (compare:CCFP
4534 (unspec:V2DF [(match_dup 1)
4535 (match_dup 2)
4536 (match_dup 3)]
4537 UNSPEC_BCD_ADD_SUB)
4538 (match_dup 4)))])])