152
|
1 /* { dg-do compile } */
|
|
2 /* { dg-skip-if "Require optimisation to compile DCE tests" { *-*-* } { "-O0" "-mfloat-abi=softfp" } { "" } } */
|
|
3 /* { dg-require-effective-target arm_v8_1m_main_cde_mve_fp_ok } */
|
|
4 /* { dg-add-options arm_v8_1m_main_cde_mve_fp } */
|
|
5 /* We use -ffast-math so that the addition of 0.0 to a value is assumed to not
|
|
6 change the value. This means the tests for float types can use the same
|
|
7 trick of adding to a value initialised to zero to check whether the RTL
|
|
8 patterns correctly mark that the incoming value is not used. */
|
|
9 /* { dg-additional-options "-ffast-math" } */
|
|
10 /* { dg-final { check-function-bodies "**" "" } } */
|
|
11
|
|
12 #include "cde-mve-tests.c"
|
|
13
|
|
14 /* NOTE:
|
|
15 We avoid matching the functions returning a __builtin_neon_ti value since
|
|
16 there are variations between processors that make matching the whole
|
|
17 function difficult.
|
|
18 Since moving a TImode value into an MVE 'Q' register takes a few
|
|
19 temporaries, this leaves many instructions which can end up being
|
|
20 scheduled in different ways. Matching the ways this ends up getting
|
|
21 scheduled and restructured is awkward, and the extra tests for this one
|
|
22 data type don't seem to be worth the confusing testcases. */
|
|
23
|
|
24 /*
|
|
25 ** test_cde_vcx1q_u8float16x8_tintint:
|
|
26 ** vcx1 p0, q0, #33
|
|
27 ** bx lr
|
|
28 */
|
|
29 /*
|
|
30 ** test_cde_vcx1q_u8float32x4_tintint:
|
|
31 ** vcx1 p0, q0, #33
|
|
32 ** bx lr
|
|
33 */
|
|
34 /*
|
|
35 ** test_cde_vcx1q_u8uint8x16_tintint:
|
|
36 ** vcx1 p0, q0, #33
|
|
37 ** bx lr
|
|
38 */
|
|
39 /*
|
|
40 ** test_cde_vcx1q_u8uint16x8_tintint:
|
|
41 ** vcx1 p0, q0, #33
|
|
42 ** bx lr
|
|
43 */
|
|
44 /*
|
|
45 ** test_cde_vcx1q_u8uint32x4_tintint:
|
|
46 ** vcx1 p0, q0, #33
|
|
47 ** bx lr
|
|
48 */
|
|
49 /*
|
|
50 ** test_cde_vcx1q_u8uint64x2_tintint:
|
|
51 ** vcx1 p0, q0, #33
|
|
52 ** bx lr
|
|
53 */
|
|
54 /*
|
|
55 ** test_cde_vcx1q_u8int8x16_tintint:
|
|
56 ** vcx1 p0, q0, #33
|
|
57 ** bx lr
|
|
58 */
|
|
59 /*
|
|
60 ** test_cde_vcx1q_u8int16x8_tintint:
|
|
61 ** vcx1 p0, q0, #33
|
|
62 ** bx lr
|
|
63 */
|
|
64 /*
|
|
65 ** test_cde_vcx1q_u8int32x4_tintint:
|
|
66 ** vcx1 p0, q0, #33
|
|
67 ** bx lr
|
|
68 */
|
|
69 /*
|
|
70 ** test_cde_vcx1q_u8int64x2_tintint:
|
|
71 ** vcx1 p0, q0, #33
|
|
72 ** bx lr
|
|
73 */
|
|
74 /*
|
|
75 ** test_cde_vcx1qafloat16x8_tintint:
|
|
76 ** vldr\.64 d0, \.L([0-9]*)
|
|
77 ** vldr\.64 d1, \.L\1\+8
|
|
78 ** vcx1a p0, q0, #33
|
|
79 ** bx lr
|
|
80 */
|
|
81 /*
|
|
82 ** test_cde_vcx1qafloat32x4_tintint:
|
|
83 ** vldr\.64 d0, \.L([0-9]*)
|
|
84 ** vldr\.64 d1, \.L\1\+8
|
|
85 ** vcx1a p0, q0, #33
|
|
86 ** bx lr
|
|
87 */
|
|
88 /*
|
|
89 ** test_cde_vcx1qauint8x16_tintint:
|
|
90 ** vldr\.64 d0, \.L([0-9]*)
|
|
91 ** vldr\.64 d1, \.L\1\+8
|
|
92 ** vcx1a p0, q0, #33
|
|
93 ** bx lr
|
|
94 */
|
|
95 /*
|
|
96 ** test_cde_vcx1qauint16x8_tintint:
|
|
97 ** vldr\.64 d0, \.L([0-9]*)
|
|
98 ** vldr\.64 d1, \.L\1\+8
|
|
99 ** vcx1a p0, q0, #33
|
|
100 ** bx lr
|
|
101 */
|
|
102 /*
|
|
103 ** test_cde_vcx1qauint32x4_tintint:
|
|
104 ** vldr\.64 d0, \.L([0-9]*)
|
|
105 ** vldr\.64 d1, \.L\1\+8
|
|
106 ** vcx1a p0, q0, #33
|
|
107 ** bx lr
|
|
108 */
|
|
109 /*
|
|
110 ** test_cde_vcx1qauint64x2_tintint:
|
|
111 ** vldr\.64 d0, \.L([0-9]*)
|
|
112 ** vldr\.64 d1, \.L\1\+8
|
|
113 ** vcx1a p0, q0, #33
|
|
114 ** bx lr
|
|
115 */
|
|
116 /*
|
|
117 ** test_cde_vcx1qaint8x16_tintint:
|
|
118 ** vldr\.64 d0, \.L([0-9]*)
|
|
119 ** vldr\.64 d1, \.L\1\+8
|
|
120 ** vcx1a p0, q0, #33
|
|
121 ** bx lr
|
|
122 */
|
|
123 /*
|
|
124 ** test_cde_vcx1qaint16x8_tintint:
|
|
125 ** vldr\.64 d0, \.L([0-9]*)
|
|
126 ** vldr\.64 d1, \.L\1\+8
|
|
127 ** vcx1a p0, q0, #33
|
|
128 ** bx lr
|
|
129 */
|
|
130 /*
|
|
131 ** test_cde_vcx1qaint32x4_tintint:
|
|
132 ** vldr\.64 d0, \.L([0-9]*)
|
|
133 ** vldr\.64 d1, \.L\1\+8
|
|
134 ** vcx1a p0, q0, #33
|
|
135 ** bx lr
|
|
136 */
|
|
137 /*
|
|
138 ** test_cde_vcx1qaint64x2_tintint:
|
|
139 ** vldr\.64 d0, \.L([0-9]*)
|
|
140 ** vldr\.64 d1, \.L\1\+8
|
|
141 ** vcx1a p0, q0, #33
|
|
142 ** bx lr
|
|
143 */
|
|
144 /*
|
|
145 ** test_cde_vcx2q_u8float16x8_tuint16x8_tint:
|
|
146 ** vcx2 p0, q0, q0, #33
|
|
147 ** bx lr
|
|
148 */
|
|
149 /*
|
|
150 ** test_cde_vcx2q_u8float16x8_tfloat32x4_tint:
|
|
151 ** vcx2 p0, q0, q0, #33
|
|
152 ** bx lr
|
|
153 */
|
|
154 /*
|
|
155 ** test_cde_vcx2q_u8float32x4_tuint8x16_tint:
|
|
156 ** vcx2 p0, q0, q0, #33
|
|
157 ** bx lr
|
|
158 */
|
|
159 /*
|
|
160 ** test_cde_vcx2q_u8int64x2_tuint8x16_tint:
|
|
161 ** vcx2 p0, q0, q0, #33
|
|
162 ** bx lr
|
|
163 */
|
|
164 /*
|
|
165 ** test_cde_vcx2q_u8int8x16_tuint8x16_tint:
|
|
166 ** vcx2 p0, q0, q0, #33
|
|
167 ** bx lr
|
|
168 */
|
|
169 /*
|
|
170 ** test_cde_vcx2q_u8uint16x8_tuint8x16_tint:
|
|
171 ** vcx2 p0, q0, q0, #33
|
|
172 ** bx lr
|
|
173 */
|
|
174 /*
|
|
175 ** test_cde_vcx2q_u8uint8x16_tint64x2_tint:
|
|
176 ** vcx2 p0, q0, q0, #33
|
|
177 ** bx lr
|
|
178 */
|
|
179 /*
|
|
180 ** test_cde_vcx2q_u8uint8x16_tint8x16_tint:
|
|
181 ** vcx2 p0, q0, q0, #33
|
|
182 ** bx lr
|
|
183 */
|
|
184 /*
|
|
185 ** test_cde_vcx2q_u8uint8x16_tuint16x8_tint:
|
|
186 ** vcx2 p0, q0, q0, #33
|
|
187 ** bx lr
|
|
188 */
|
|
189 /*
|
|
190 ** test_cde_vcx2q_u8uint8x16_tuint8x16_tint:
|
|
191 ** vcx2 p0, q0, q0, #33
|
|
192 ** bx lr
|
|
193 */
|
|
194 /*
|
|
195 ** test_cde_vcx2qfloat16x8_tuint16x8_tint:
|
|
196 ** vcx2 p0, q0, q0, #33
|
|
197 ** bx lr
|
|
198 */
|
|
199 /*
|
|
200 ** test_cde_vcx2qfloat16x8_tfloat32x4_tint:
|
|
201 ** vcx2 p0, q0, q0, #33
|
|
202 ** bx lr
|
|
203 */
|
|
204 /*
|
|
205 ** test_cde_vcx2qfloat32x4_tuint8x16_tint:
|
|
206 ** vcx2 p0, q0, q0, #33
|
|
207 ** bx lr
|
|
208 */
|
|
209 /*
|
|
210 ** test_cde_vcx2qint64x2_tuint8x16_tint:
|
|
211 ** vcx2 p0, q0, q0, #33
|
|
212 ** bx lr
|
|
213 */
|
|
214 /*
|
|
215 ** test_cde_vcx2qint8x16_tuint8x16_tint:
|
|
216 ** vcx2 p0, q0, q0, #33
|
|
217 ** bx lr
|
|
218 */
|
|
219 /*
|
|
220 ** test_cde_vcx2quint16x8_tuint8x16_tint:
|
|
221 ** vcx2 p0, q0, q0, #33
|
|
222 ** bx lr
|
|
223 */
|
|
224 /*
|
|
225 ** test_cde_vcx2quint8x16_tint64x2_tint:
|
|
226 ** vcx2 p0, q0, q0, #33
|
|
227 ** bx lr
|
|
228 */
|
|
229 /*
|
|
230 ** test_cde_vcx2quint8x16_tint8x16_tint:
|
|
231 ** vcx2 p0, q0, q0, #33
|
|
232 ** bx lr
|
|
233 */
|
|
234 /*
|
|
235 ** test_cde_vcx2quint8x16_tuint16x8_tint:
|
|
236 ** vcx2 p0, q0, q0, #33
|
|
237 ** bx lr
|
|
238 */
|
|
239 /*
|
|
240 ** test_cde_vcx2quint8x16_tuint8x16_tint:
|
|
241 ** vcx2 p0, q0, q0, #33
|
|
242 ** bx lr
|
|
243 */
|
|
244 /*
|
|
245 ** test_cde_vcx2qafloat16x8_tuint16x8_tint:
|
|
246 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
247 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
248 ** vcx2a p0, (q[0-7]), q0, #33
|
|
249 ** vmov q0, \2
|
|
250 ** bx lr
|
|
251 */
|
|
252 /*
|
|
253 ** test_cde_vcx2qafloat16x8_tfloat32x4_tint:
|
|
254 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
255 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
256 ** vcx2a p0, (q[0-7]), q0, #33
|
|
257 ** vmov q0, \2
|
|
258 ** bx lr
|
|
259 */
|
|
260 /*
|
|
261 ** test_cde_vcx2qafloat32x4_tuint8x16_tint:
|
|
262 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
263 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
264 ** vcx2a p0, (q[0-7]), q0, #33
|
|
265 ** vmov q0, \2
|
|
266 ** bx lr
|
|
267 */
|
|
268 /*
|
|
269 ** test_cde_vcx2qaint64x2_tuint8x16_tint:
|
|
270 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
271 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
272 ** vcx2a p0, (q[0-7]), q0, #33
|
|
273 ** vmov q0, \2
|
|
274 ** bx lr
|
|
275 */
|
|
276 /*
|
|
277 ** test_cde_vcx2qaint8x16_tuint8x16_tint:
|
|
278 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
279 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
280 ** vcx2a p0, (q[0-7]), q0, #33
|
|
281 ** vmov q0, \2
|
|
282 ** bx lr
|
|
283 */
|
|
284 /*
|
|
285 ** test_cde_vcx2qauint16x8_tuint8x16_tint:
|
|
286 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
287 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
288 ** vcx2a p0, (q[0-7]), q0, #33
|
|
289 ** vmov q0, \2
|
|
290 ** bx lr
|
|
291 */
|
|
292 /*
|
|
293 ** test_cde_vcx2qauint8x16_tint64x2_tint:
|
|
294 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
295 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
296 ** vcx2a p0, (q[0-7]), q0, #33
|
|
297 ** vmov q0, \2
|
|
298 ** bx lr
|
|
299 */
|
|
300 /*
|
|
301 ** test_cde_vcx2qauint8x16_tint8x16_tint:
|
|
302 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
303 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
304 ** vcx2a p0, (q[0-7]), q0, #33
|
|
305 ** vmov q0, \2
|
|
306 ** bx lr
|
|
307 */
|
|
308 /*
|
|
309 ** test_cde_vcx2qauint8x16_tuint16x8_tint:
|
|
310 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
311 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
312 ** vcx2a p0, (q[0-7]), q0, #33
|
|
313 ** vmov q0, \2
|
|
314 ** bx lr
|
|
315 */
|
|
316 /*
|
|
317 ** test_cde_vcx2qauint8x16_tuint8x16_tint:
|
|
318 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
319 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
320 ** vcx2a p0, (q[0-7]), q0, #33
|
|
321 ** vmov q0, \2
|
|
322 ** bx lr
|
|
323 */
|
|
324 /*
|
|
325 ** test_cde_vcx3q_u8uint8x16_tuint8x16_tuint8x16_t:
|
|
326 ** vcx3 p0, q0, q0, q1, #12
|
|
327 ** bx lr
|
|
328 */
|
|
329 /*
|
|
330 ** test_cde_vcx3q_u8uint16x8_tuint8x16_tuint8x16_t:
|
|
331 ** vcx3 p0, q0, q0, q1, #12
|
|
332 ** bx lr
|
|
333 */
|
|
334 /*
|
|
335 ** test_cde_vcx3q_u8uint8x16_tuint16x8_tuint8x16_t:
|
|
336 ** vcx3 p0, q0, q0, q1, #12
|
|
337 ** bx lr
|
|
338 */
|
|
339 /*
|
|
340 ** test_cde_vcx3q_u8uint8x16_tuint8x16_tuint16x8_t:
|
|
341 ** vcx3 p0, q0, q0, q1, #12
|
|
342 ** bx lr
|
|
343 */
|
|
344 /*
|
|
345 ** test_cde_vcx3q_u8float16x8_tfloat16x8_tfloat16x8_t:
|
|
346 ** vcx3 p0, q0, q0, q1, #12
|
|
347 ** bx lr
|
|
348 */
|
|
349 /*
|
|
350 ** test_cde_vcx3q_u8float32x4_tuint64x2_tfloat16x8_t:
|
|
351 ** vcx3 p0, q0, q0, q1, #12
|
|
352 ** bx lr
|
|
353 */
|
|
354 /*
|
|
355 ** test_cde_vcx3q_u8int8x16_tuint8x16_tuint8x16_t:
|
|
356 ** vcx3 p0, q0, q0, q1, #12
|
|
357 ** bx lr
|
|
358 */
|
|
359 /*
|
|
360 ** test_cde_vcx3q_u8uint8x16_tint8x16_tuint8x16_t:
|
|
361 ** vcx3 p0, q0, q0, q1, #12
|
|
362 ** bx lr
|
|
363 */
|
|
364 /*
|
|
365 ** test_cde_vcx3q_u8uint8x16_tuint8x16_tint8x16_t:
|
|
366 ** vcx3 p0, q0, q0, q1, #12
|
|
367 ** bx lr
|
|
368 */
|
|
369 /*
|
|
370 ** test_cde_vcx3q_u8int64x2_tuint8x16_tuint8x16_t:
|
|
371 ** vcx3 p0, q0, q0, q1, #12
|
|
372 ** bx lr
|
|
373 */
|
|
374 /*
|
|
375 ** test_cde_vcx3q_u8uint8x16_tint64x2_tuint8x16_t:
|
|
376 ** vcx3 p0, q0, q0, q1, #12
|
|
377 ** bx lr
|
|
378 */
|
|
379 /*
|
|
380 ** test_cde_vcx3q_u8uint8x16_tuint8x16_tint64x2_t:
|
|
381 ** vcx3 p0, q0, q0, q1, #12
|
|
382 ** bx lr
|
|
383 */
|
|
384 /*
|
|
385 ** test_cde_vcx3q_u8uint8x16_tint64x2_tint64x2_t:
|
|
386 ** vcx3 p0, q0, q0, q1, #12
|
|
387 ** bx lr
|
|
388 */
|
|
389 /*
|
|
390 ** test_cde_vcx3quint8x16_tuint8x16_tuint8x16_t:
|
|
391 ** vcx3 p0, q0, q0, q1, #12
|
|
392 ** bx lr
|
|
393 */
|
|
394 /*
|
|
395 ** test_cde_vcx3qfloat16x8_tfloat16x8_tfloat16x8_t:
|
|
396 ** vcx3 p0, q0, q0, q1, #12
|
|
397 ** bx lr
|
|
398 */
|
|
399 /*
|
|
400 ** test_cde_vcx3qfloat32x4_tuint64x2_tfloat16x8_t:
|
|
401 ** vcx3 p0, q0, q0, q1, #12
|
|
402 ** bx lr
|
|
403 */
|
|
404 /*
|
|
405 ** test_cde_vcx3quint16x8_tuint8x16_tuint8x16_t:
|
|
406 ** vcx3 p0, q0, q0, q1, #12
|
|
407 ** bx lr
|
|
408 */
|
|
409 /*
|
|
410 ** test_cde_vcx3quint8x16_tuint16x8_tuint8x16_t:
|
|
411 ** vcx3 p0, q0, q0, q1, #12
|
|
412 ** bx lr
|
|
413 */
|
|
414 /*
|
|
415 ** test_cde_vcx3quint8x16_tuint8x16_tuint16x8_t:
|
|
416 ** vcx3 p0, q0, q0, q1, #12
|
|
417 ** bx lr
|
|
418 */
|
|
419 /*
|
|
420 ** test_cde_vcx3qint8x16_tuint8x16_tuint8x16_t:
|
|
421 ** vcx3 p0, q0, q0, q1, #12
|
|
422 ** bx lr
|
|
423 */
|
|
424 /*
|
|
425 ** test_cde_vcx3quint8x16_tint8x16_tuint8x16_t:
|
|
426 ** vcx3 p0, q0, q0, q1, #12
|
|
427 ** bx lr
|
|
428 */
|
|
429 /*
|
|
430 ** test_cde_vcx3quint8x16_tuint8x16_tint8x16_t:
|
|
431 ** vcx3 p0, q0, q0, q1, #12
|
|
432 ** bx lr
|
|
433 */
|
|
434 /*
|
|
435 ** test_cde_vcx3qint64x2_tuint8x16_tuint8x16_t:
|
|
436 ** vcx3 p0, q0, q0, q1, #12
|
|
437 ** bx lr
|
|
438 */
|
|
439 /*
|
|
440 ** test_cde_vcx3quint8x16_tint64x2_tuint8x16_t:
|
|
441 ** vcx3 p0, q0, q0, q1, #12
|
|
442 ** bx lr
|
|
443 */
|
|
444 /*
|
|
445 ** test_cde_vcx3quint8x16_tuint8x16_tint64x2_t:
|
|
446 ** vcx3 p0, q0, q0, q1, #12
|
|
447 ** bx lr
|
|
448 */
|
|
449 /*
|
|
450 ** test_cde_vcx3quint8x16_tint64x2_tint64x2_t:
|
|
451 ** vcx3 p0, q0, q0, q1, #12
|
|
452 ** bx lr
|
|
453 */
|
|
454 /*
|
|
455 ** test_cde_vcx3qauint8x16_tuint8x16_tuint8x16_t:
|
|
456 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
457 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
458 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
459 ** vmov q0, \2
|
|
460 ** bx lr
|
|
461 */
|
|
462 /*
|
|
463 ** test_cde_vcx3qafloat16x8_tfloat16x8_tfloat16x8_t:
|
|
464 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
465 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
466 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
467 ** vmov q0, \2
|
|
468 ** bx lr
|
|
469 */
|
|
470 /*
|
|
471 ** test_cde_vcx3qafloat32x4_tuint64x2_tfloat16x8_t:
|
|
472 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
473 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
474 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
475 ** vmov q0, \2
|
|
476 ** bx lr
|
|
477 */
|
|
478 /*
|
|
479 ** test_cde_vcx3qauint16x8_tuint8x16_tuint8x16_t:
|
|
480 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
481 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
482 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
483 ** vmov q0, \2
|
|
484 ** bx lr
|
|
485 */
|
|
486 /*
|
|
487 ** test_cde_vcx3qauint8x16_tuint16x8_tuint8x16_t:
|
|
488 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
489 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
490 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
491 ** vmov q0, \2
|
|
492 ** bx lr
|
|
493 */
|
|
494 /*
|
|
495 ** test_cde_vcx3qauint8x16_tuint8x16_tuint16x8_t:
|
|
496 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
497 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
498 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
499 ** vmov q0, \2
|
|
500 ** bx lr
|
|
501 */
|
|
502 /*
|
|
503 ** test_cde_vcx3qaint8x16_tuint8x16_tuint8x16_t:
|
|
504 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
505 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
506 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
507 ** vmov q0, \2
|
|
508 ** bx lr
|
|
509 */
|
|
510 /*
|
|
511 ** test_cde_vcx3qauint8x16_tint8x16_tuint8x16_t:
|
|
512 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
513 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
514 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
515 ** vmov q0, \2
|
|
516 ** bx lr
|
|
517 */
|
|
518 /*
|
|
519 ** test_cde_vcx3qauint8x16_tuint8x16_tint8x16_t:
|
|
520 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
521 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
522 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
523 ** vmov q0, \2
|
|
524 ** bx lr
|
|
525 */
|
|
526 /*
|
|
527 ** test_cde_vcx3qaint64x2_tuint8x16_tuint8x16_t:
|
|
528 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
529 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
530 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
531 ** vmov q0, \2
|
|
532 ** bx lr
|
|
533 */
|
|
534 /*
|
|
535 ** test_cde_vcx3qauint8x16_tint64x2_tuint8x16_t:
|
|
536 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
537 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
538 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
539 ** vmov q0, \2
|
|
540 ** bx lr
|
|
541 */
|
|
542 /*
|
|
543 ** test_cde_vcx3qauint8x16_tuint8x16_tint64x2_t:
|
|
544 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
545 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
546 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
547 ** vmov q0, \2
|
|
548 ** bx lr
|
|
549 */
|
|
550 /*
|
|
551 ** test_cde_vcx3qauint8x16_tint64x2_tint64x2_t:
|
|
552 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L([0-9]*)
|
|
553 ** vldr\.64 d(?:[01][0-4]|[0-9]), \.L\1\+8
|
|
554 ** vcx3a p0, (q[0-7]), q0, q1, #12
|
|
555 ** vmov q0, \2
|
|
556 ** bx lr
|
|
557 */
|
|
558
|
|
559 /* Predicated MVE intrinsics. */
|
|
560 /* Merging lane predication types.
|
|
561 NOTE: Depending on the target, the setup instructions (vldr's and vmsr) can
|
|
562 be in a different order. Here we just check that all the expected setup
|
|
563 instructions are there. We don't check that the setup instructions are
|
|
564    different since the likelihood of the compiler generating repeated versions
|
|
565 of one rather than one and the other is very low and it's difficult to apply
|
|
566 such a constraint in TCL regexps (lookahead/lookbehind constraints may not
|
|
567 contain back references). */
|
|
568 /*
|
|
569 ** test_cde_vcx1q_mfloat16x8_tintint:
|
|
570 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
571 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
572 ** vpst
|
|
573 ** vcx1t p0, q0, #32
|
|
574 ** bx lr
|
|
575 */
|
|
576 /*
|
|
577 ** test_cde_vcx1q_mfloat32x4_tintint:
|
|
578 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
579 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
580 ** vpst
|
|
581 ** vcx1t p0, q0, #32
|
|
582 ** bx lr
|
|
583 */
|
|
584 /*
|
|
585 ** test_cde_vcx1q_muint8x16_tintint:
|
|
586 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
587 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
588 ** vpst
|
|
589 ** vcx1t p0, q0, #32
|
|
590 ** bx lr
|
|
591 */
|
|
592 /*
|
|
593 ** test_cde_vcx1q_muint16x8_tintint:
|
|
594 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
595 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
596 ** vpst
|
|
597 ** vcx1t p0, q0, #32
|
|
598 ** bx lr
|
|
599 */
|
|
600 /*
|
|
601 ** test_cde_vcx1q_muint32x4_tintint:
|
|
602 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
603 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
604 ** vpst
|
|
605 ** vcx1t p0, q0, #32
|
|
606 ** bx lr
|
|
607 */
|
|
608 /*
|
|
609 ** test_cde_vcx1q_muint64x2_tintint:
|
|
610 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
611 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
612 ** vpst
|
|
613 ** vcx1t p0, q0, #32
|
|
614 ** bx lr
|
|
615 */
|
|
616 /*
|
|
617 ** test_cde_vcx1q_mint8x16_tintint:
|
|
618 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
619 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
620 ** vpst
|
|
621 ** vcx1t p0, q0, #32
|
|
622 ** bx lr
|
|
623 */
|
|
624 /*
|
|
625 ** test_cde_vcx1q_mint16x8_tintint:
|
|
626 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
627 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
628 ** vpst
|
|
629 ** vcx1t p0, q0, #32
|
|
630 ** bx lr
|
|
631 */
|
|
632 /*
|
|
633 ** test_cde_vcx1q_mint32x4_tintint:
|
|
634 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
635 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
636 ** vpst
|
|
637 ** vcx1t p0, q0, #32
|
|
638 ** bx lr
|
|
639 */
|
|
640 /*
|
|
641 ** test_cde_vcx1q_mint64x2_tintint:
|
|
642 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
643 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
644 ** vpst
|
|
645 ** vcx1t p0, q0, #32
|
|
646 ** bx lr
|
|
647 */
|
|
648
|
|
649
|
|
650 /*
|
|
651 ** test_cde_vcx1qa_mfloat16x8_tintint:
|
|
652 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
653 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
654 ** vpst
|
|
655 ** vcx1at p0, q0, #32
|
|
656 ** bx lr
|
|
657 */
|
|
658 /*
|
|
659 ** test_cde_vcx1qa_mfloat32x4_tintint:
|
|
660 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
661 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
662 ** vpst
|
|
663 ** vcx1at p0, q0, #32
|
|
664 ** bx lr
|
|
665 */
|
|
666 /*
|
|
667 ** test_cde_vcx1qa_muint8x16_tintint:
|
|
668 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
669 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
670 ** vpst
|
|
671 ** vcx1at p0, q0, #32
|
|
672 ** bx lr
|
|
673 */
|
|
674 /*
|
|
675 ** test_cde_vcx1qa_muint16x8_tintint:
|
|
676 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
677 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
678 ** vpst
|
|
679 ** vcx1at p0, q0, #32
|
|
680 ** bx lr
|
|
681 */
|
|
682 /*
|
|
683 ** test_cde_vcx1qa_muint32x4_tintint:
|
|
684 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
685 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
686 ** vpst
|
|
687 ** vcx1at p0, q0, #32
|
|
688 ** bx lr
|
|
689 */
|
|
690 /*
|
|
691 ** test_cde_vcx1qa_muint64x2_tintint:
|
|
692 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
693 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
694 ** vpst
|
|
695 ** vcx1at p0, q0, #32
|
|
696 ** bx lr
|
|
697 */
|
|
698 /*
|
|
699 ** test_cde_vcx1qa_mint8x16_tintint:
|
|
700 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
701 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
702 ** vpst
|
|
703 ** vcx1at p0, q0, #32
|
|
704 ** bx lr
|
|
705 */
|
|
706 /*
|
|
707 ** test_cde_vcx1qa_mint16x8_tintint:
|
|
708 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
709 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
710 ** vpst
|
|
711 ** vcx1at p0, q0, #32
|
|
712 ** bx lr
|
|
713 */
|
|
714 /*
|
|
715 ** test_cde_vcx1qa_mint32x4_tintint:
|
|
716 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
717 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
718 ** vpst
|
|
719 ** vcx1at p0, q0, #32
|
|
720 ** bx lr
|
|
721 */
|
|
722 /*
|
|
723 ** test_cde_vcx1qa_mint64x2_tintint:
|
|
724 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
725 ** (?:vldr\.64 d0, \.L[0-9]*\n\tvldr\.64 d1, \.L[0-9]*\+8|vmsr P0, r2 @ movhi)
|
|
726 ** vpst
|
|
727 ** vcx1at p0, q0, #32
|
|
728 ** bx lr
|
|
729 */
|
|
730
|
|
731
|
|
732 /*
|
|
733 ** test_cde_vcx2q_mfloat16x8_tuint16x8_tint:
|
|
734 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
735 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
736 ** vpst
|
|
737 ** vcx2t p0, (q[0-7]), q0, #32
|
|
738 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
739 ** bx lr
|
|
740 */
|
|
741 /*
|
|
742 ** test_cde_vcx2q_mfloat16x8_tfloat32x4_tint:
|
|
743 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
744 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
745 ** vpst
|
|
746 ** vcx2t p0, (q[0-7]), q0, #32
|
|
747 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
748 ** bx lr
|
|
749 */
|
|
750 /*
|
|
751 ** test_cde_vcx2q_mfloat32x4_tuint8x16_tint:
|
|
752 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
753 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
754 ** vpst
|
|
755 ** vcx2t p0, (q[0-7]), q0, #32
|
|
756 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
757 ** bx lr
|
|
758 */
|
|
759 /*
|
|
760 ** test_cde_vcx2q_mint64x2_tuint8x16_tint:
|
|
761 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
762 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
763 ** vpst
|
|
764 ** vcx2t p0, (q[0-7]), q0, #32
|
|
765 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
766 ** bx lr
|
|
767 */
|
|
768 /*
|
|
769 ** test_cde_vcx2q_mint8x16_tuint8x16_tint:
|
|
770 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
771 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
772 ** vpst
|
|
773 ** vcx2t p0, (q[0-7]), q0, #32
|
|
774 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
775 ** bx lr
|
|
776 */
|
|
777 /*
|
|
778 ** test_cde_vcx2q_muint16x8_tuint8x16_tint:
|
|
779 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
780 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
781 ** vpst
|
|
782 ** vcx2t p0, (q[0-7]), q0, #32
|
|
783 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
784 ** bx lr
|
|
785 */
|
|
786 /*
|
|
787 ** test_cde_vcx2q_muint8x16_tint64x2_tint:
|
|
788 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
789 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
790 ** vpst
|
|
791 ** vcx2t p0, (q[0-7]), q0, #32
|
|
792 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
793 ** bx lr
|
|
794 */
|
|
795 /*
|
|
796 ** test_cde_vcx2q_muint8x16_tint8x16_tint:
|
|
797 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
798 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
799 ** vpst
|
|
800 ** vcx2t p0, (q[0-7]), q0, #32
|
|
801 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
802 ** bx lr
|
|
803 */
|
|
804 /*
|
|
805 ** test_cde_vcx2q_muint8x16_tuint16x8_tint:
|
|
806 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
807 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
808 ** vpst
|
|
809 ** vcx2t p0, (q[0-7]), q0, #32
|
|
810 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
811 ** bx lr
|
|
812 */
|
|
813 /*
|
|
814 ** test_cde_vcx2q_muint8x16_tuint8x16_tint:
|
|
815 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
816 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
817 ** vpst
|
|
818 ** vcx2t p0, (q[0-7]), q0, #32
|
|
819 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
820 ** bx lr
|
|
821 */
|
|
822
|
|
823
|
|
824 /*
|
|
825 ** test_cde_vcx2qa_mfloat16x8_tuint16x8_tint:
|
|
826 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
827 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
828 ** vpst
|
|
829 ** vcx2at p0, (q[0-7]), q0, #32
|
|
830 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
831 ** bx lr
|
|
832 */
|
|
833 /*
|
|
834 ** test_cde_vcx2qa_mfloat16x8_tfloat32x4_tint:
|
|
835 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
836 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
837 ** vpst
|
|
838 ** vcx2at p0, (q[0-7]), q0, #32
|
|
839 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
840 ** bx lr
|
|
841 */
|
|
842 /*
|
|
843 ** test_cde_vcx2qa_mfloat32x4_tuint8x16_tint:
|
|
844 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
845 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
846 ** vpst
|
|
847 ** vcx2at p0, (q[0-7]), q0, #32
|
|
848 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
849 ** bx lr
|
|
850 */
|
|
851 /*
|
|
852 ** test_cde_vcx2qa_mint64x2_tuint8x16_tint:
|
|
853 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
854 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
855 ** vpst
|
|
856 ** vcx2at p0, (q[0-7]), q0, #32
|
|
857 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
858 ** bx lr
|
|
859 */
|
|
860 /*
|
|
861 ** test_cde_vcx2qa_mint8x16_tuint8x16_tint:
|
|
862 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
863 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
864 ** vpst
|
|
865 ** vcx2at p0, (q[0-7]), q0, #32
|
|
866 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
867 ** bx lr
|
|
868 */
|
|
869 /*
|
|
870 ** test_cde_vcx2qa_muint16x8_tuint8x16_tint:
|
|
871 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
872 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
873 ** vpst
|
|
874 ** vcx2at p0, (q[0-7]), q0, #32
|
|
875 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
876 ** bx lr
|
|
877 */
|
|
878 /*
|
|
879 ** test_cde_vcx2qa_muint8x16_tint64x2_tint:
|
|
880 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
881 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
882 ** vpst
|
|
883 ** vcx2at p0, (q[0-7]), q0, #32
|
|
884 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
885 ** bx lr
|
|
886 */
|
|
887 /*
|
|
888 ** test_cde_vcx2qa_muint8x16_tint8x16_tint:
|
|
889 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
890 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
891 ** vpst
|
|
892 ** vcx2at p0, (q[0-7]), q0, #32
|
|
893 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
894 ** bx lr
|
|
895 */
|
|
896 /*
|
|
897 ** test_cde_vcx2qa_muint8x16_tuint16x8_tint:
|
|
898 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
899 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
900 ** vpst
|
|
901 ** vcx2at p0, (q[0-7]), q0, #32
|
|
902 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
903 ** bx lr
|
|
904 */
|
|
905 /*
|
|
906 ** test_cde_vcx2qa_muint8x16_tuint8x16_tint:
|
|
907 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
908 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r1 @ movhi)
|
|
909 ** vpst
|
|
910 ** vcx2at p0, (q[0-7]), q0, #32
|
|
911 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
912 ** bx lr
|
|
913 */
|
|
914
|
|
915
|
|
916 /*
|
|
917 ** test_cde_vcx3q_muint8x16_tuint8x16_tuint8x16_t:
|
|
918 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
919 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
920 ** vpst
|
|
921 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
922 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
923 ** bx lr
|
|
924 */
|
|
925 /*
|
|
926 ** test_cde_vcx3q_mfloat16x8_tfloat16x8_tfloat16x8_t:
|
|
927 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
928 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
929 ** vpst
|
|
930 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
931 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
932 ** bx lr
|
|
933 */
|
|
934 /*
|
|
935 ** test_cde_vcx3q_mfloat32x4_tuint64x2_tfloat16x8_t:
|
|
936 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
937 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
938 ** vpst
|
|
939 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
940 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
941 ** bx lr
|
|
942 */
|
|
943 /*
|
|
944 ** test_cde_vcx3q_muint16x8_tuint8x16_tuint8x16_t:
|
|
945 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
946 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
947 ** vpst
|
|
948 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
949 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
950 ** bx lr
|
|
951 */
|
|
952 /*
|
|
953 ** test_cde_vcx3q_muint8x16_tuint16x8_tuint8x16_t:
|
|
954 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
955 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
956 ** vpst
|
|
957 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
958 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
959 ** bx lr
|
|
960 */
|
|
961 /*
|
|
962 ** test_cde_vcx3q_muint8x16_tuint8x16_tuint16x8_t:
|
|
963 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
964 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
965 ** vpst
|
|
966 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
967 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
968 ** bx lr
|
|
969 */
|
|
970 /*
|
|
971 ** test_cde_vcx3q_mint8x16_tuint8x16_tuint8x16_t:
|
|
972 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
973 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
974 ** vpst
|
|
975 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
976 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
977 ** bx lr
|
|
978 */
|
|
979 /*
|
|
980 ** test_cde_vcx3q_muint8x16_tint8x16_tuint8x16_t:
|
|
981 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
982 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
983 ** vpst
|
|
984 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
985 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
986 ** bx lr
|
|
987 */
|
|
988 /*
|
|
989 ** test_cde_vcx3q_muint8x16_tuint8x16_tint8x16_t:
|
|
990 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
991 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
992 ** vpst
|
|
993 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
994 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
995 ** bx lr
|
|
996 */
|
|
997 /*
|
|
998 ** test_cde_vcx3q_mint64x2_tuint8x16_tuint8x16_t:
|
|
999 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1000 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1001 ** vpst
|
|
1002 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
1003 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1004 ** bx lr
|
|
1005 */
|
|
1006 /*
|
|
1007 ** test_cde_vcx3q_muint8x16_tint64x2_tuint8x16_t:
|
|
1008 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1009 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1010 ** vpst
|
|
1011 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
1012 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1013 ** bx lr
|
|
1014 */
|
|
1015 /*
|
|
1016 ** test_cde_vcx3q_muint8x16_tuint8x16_tint64x2_t:
|
|
1017 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1018 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1019 ** vpst
|
|
1020 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
1021 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1022 ** bx lr
|
|
1023 */
|
|
1024 /*
|
|
1025 ** test_cde_vcx3q_muint8x16_tint64x2_tint64x2_t:
|
|
1026 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1027 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1028 ** vpst
|
|
1029 ** vcx3t p0, (q[0-7]), q0, q1, #15
|
|
1030 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1031 ** bx lr
|
|
1032 */
|
|
1033
|
|
1034
|
|
1035 /*
|
|
1036 ** test_cde_vcx3qa_muint8x16_tuint8x16_tuint8x16_t:
|
|
1037 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1038 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1039 ** vpst
|
|
1040 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1041 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1042 ** bx lr
|
|
1043 */
|
|
1044 /*
|
|
1045 ** test_cde_vcx3qa_mfloat16x8_tfloat16x8_tfloat16x8_t:
|
|
1046 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1047 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1048 ** vpst
|
|
1049 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1050 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1051 ** bx lr
|
|
1052 */
|
|
1053 /*
|
|
1054 ** test_cde_vcx3qa_mfloat32x4_tuint64x2_tfloat16x8_t:
|
|
1055 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1056 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1057 ** vpst
|
|
1058 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1059 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1060 ** bx lr
|
|
1061 */
|
|
1062 /*
|
|
1063 ** test_cde_vcx3qa_muint16x8_tuint8x16_tuint8x16_t:
|
|
1064 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1065 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1066 ** vpst
|
|
1067 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1068 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1069 ** bx lr
|
|
1070 */
|
|
1071 /*
|
|
1072 ** test_cde_vcx3qa_muint8x16_tuint16x8_tuint8x16_t:
|
|
1073 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1074 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1075 ** vpst
|
|
1076 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1077 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1078 ** bx lr
|
|
1079 */
|
|
1080 /*
|
|
1081 ** test_cde_vcx3qa_muint8x16_tuint8x16_tuint16x8_t:
|
|
1082 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1083 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1084 ** vpst
|
|
1085 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1086 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1087 ** bx lr
|
|
1088 */
|
|
1089 /*
|
|
1090 ** test_cde_vcx3qa_mint8x16_tuint8x16_tuint8x16_t:
|
|
1091 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1092 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1093 ** vpst
|
|
1094 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1095 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1096 ** bx lr
|
|
1097 */
|
|
1098 /*
|
|
1099 ** test_cde_vcx3qa_muint8x16_tint8x16_tuint8x16_t:
|
|
1100 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1101 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1102 ** vpst
|
|
1103 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1104 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1105 ** bx lr
|
|
1106 */
|
|
1107 /*
|
|
1108 ** test_cde_vcx3qa_muint8x16_tuint8x16_tint8x16_t:
|
|
1109 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1110 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1111 ** vpst
|
|
1112 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1113 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1114 ** bx lr
|
|
1115 */
|
|
1116 /*
|
|
1117 ** test_cde_vcx3qa_mint64x2_tuint8x16_tuint8x16_t:
|
|
1118 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1119 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1120 ** vpst
|
|
1121 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1122 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1123 ** bx lr
|
|
1124 */
|
|
1125 /*
|
|
1126 ** test_cde_vcx3qa_muint8x16_tint64x2_tuint8x16_t:
|
|
1127 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1128 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1129 ** vpst
|
|
1130 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1131 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1132 ** bx lr
|
|
1133 */
|
|
1134 /*
|
|
1135 ** test_cde_vcx3qa_muint8x16_tuint8x16_tint64x2_t:
|
|
1136 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1137 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1138 ** vpst
|
|
1139 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1140 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1141 ** bx lr
|
|
1142 */
|
|
1143 /*
|
|
1144 ** test_cde_vcx3qa_muint8x16_tint64x2_tint64x2_t:
|
|
1145 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1146 ** (?:vldr\.64 d(?:[02468]|1[024]), \.L[0-9]*\n\tvldr\.64 d(?:[13579]|1[135]), \.L[0-9]*\+8|vmsr P0, r0 @ movhi)
|
|
1147 ** vpst
|
|
1148 ** vcx3at p0, (q[0-7]), q0, q1, #15
|
|
1149 ** vmov q0, \1([[:space:]]+@ [^\n]*)?
|
|
1150 ** bx lr
|
|
1151 */
|