;; Machine description for AArch64 architecture.
;; Copyright (C) 2009-2020 Free Software Foundation, Inc.
;; Contributed by ARM Ltd.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

|
;; True for the condition-code register, either in the mode we expect
;; or (when MODE is VOIDmode) in any CC-class mode.
(define_special_predicate "cc_register"
  (and (match_code "reg")
       (and (match_test "REGNO (op) == CC_REGNUM")
	    (ior (match_test "mode == GET_MODE (op)")
		 (match_test "mode == VOIDmode
			      && GET_MODE_CLASS (GET_MODE (op)) == MODE_CC"))))
)

;; Call targets: a symbol or an address held in a register.
(define_predicate "aarch64_call_insn_operand"
  (ior (match_code "symbol_ref")
       (match_operand 0 "register_operand")))

;; A register allocated to the general (integer) register class.
(define_predicate "aarch64_general_reg"
  (and (match_operand 0 "register_operand")
       (match_test "REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS")))

;; Return true if OP is a (const_int 0) operand.
(define_predicate "const0_operand"
  (and (match_code "const_int")
       (match_test "op == CONST0_RTX (mode)")))
;; A constant (or duplicated vector constant) in the range 1..3.
(define_predicate "const_1_to_3_operand"
  (match_code "const_int,const_vector")
{
  op = unwrap_const_vec_duplicate (op);
  return CONST_INT_P (op) && IN_RANGE (INTVAL (op), 1, 3);
})

;; A truncation, or a subreg that selects the low part of its operand.
(define_predicate "subreg_lowpart_operator"
  (ior (match_code "truncate")
       (and (match_code "subreg")
	    (match_test "subreg_lowpart_p (op)"))))

;; Immediates usable by conditional-compare instructions.
(define_predicate "aarch64_ccmp_immediate"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), -31, 31)")))

(define_predicate "aarch64_ccmp_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_ccmp_immediate")))

;; A hard register belonging to the FP/SIMD register file.
(define_predicate "aarch64_simd_register"
  (and (match_code "reg")
       (match_test "FP_REGNUM_P (REGNO (op))")))
;; A register or the constant zero of the operand's mode.
(define_predicate "aarch64_reg_or_zero"
  (and (match_code "reg,subreg,const_int,const_double")
       (ior (match_operand 0 "register_operand")
	    (match_test "op == CONST0_RTX (GET_MODE (op))"))))

;; A register or floating-point +0.0.
(define_predicate "aarch64_reg_or_fp_zero"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_double")
	    (match_test "aarch64_float_const_zero_rtx_p (op)"))))

(define_predicate "aarch64_reg_zero_or_fp_zero"
  (ior (match_operand 0 "aarch64_reg_or_fp_zero")
       (match_operand 0 "aarch64_reg_or_zero")))

;; A register or one of the constants 0, -1 or 1.
(define_predicate "aarch64_reg_zero_or_m1_or_1"
  (and (match_code "reg,subreg,const_int")
       (ior (match_operand 0 "register_operand")
	    (ior (match_test "op == const0_rtx")
		 (ior (match_test "op == constm1_rtx")
		      (match_test "op == const1_rtx"))))))
;; A register or a vector immediate valid for SIMD ORR.
(define_predicate "aarch64_reg_or_orr_imm"
   (ior (match_operand 0 "register_operand")
	(and (match_code "const_vector")
	     (match_test "aarch64_simd_valid_immediate (op, NULL,
							AARCH64_CHECK_ORR)"))))

;; A register or a vector immediate valid for SIMD BIC.
(define_predicate "aarch64_reg_or_bic_imm"
   (ior (match_operand 0 "register_operand")
	(and (match_code "const_vector")
	     (match_test "aarch64_simd_valid_immediate (op, NULL,
							AARCH64_CHECK_BIC)"))))

;; FP comparisons can use a register or +0.0 directly.
(define_predicate "aarch64_fp_compare_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_double")
	    (match_test "aarch64_float_const_zero_rtx_p (op)"))))

;; A floating-point constant that is a positive power of 2.
(define_predicate "aarch64_fp_pow2"
  (and (match_code "const_double")
       (match_test "aarch64_fpconst_pow_of_2 (op) > 0")))

;; A floating-point constant whose reciprocal is a power of 2.
(define_predicate "aarch64_fp_pow2_recip"
  (and (match_code "const_double")
       (match_test "aarch64_fpconst_pow2_recip (op) > 0")))

;; A vector of floating-point constants that are powers of 2.
(define_predicate "aarch64_fp_vec_pow2"
  (match_test "aarch64_vec_fpconst_pow_of_2 (op) > 0"))
;; A poly_int constant usable with the SVE CNT[BHWD] instructions.
(define_predicate "aarch64_sve_cnt_immediate"
  (and (match_code "const_poly_int")
       (match_test "aarch64_sve_cnt_immediate_p (op)")))

;; A constant whose negation fits a 12-bit (optionally shifted) immediate.
(define_predicate "aarch64_sub_immediate"
  (and (match_code "const_int")
       (match_test "aarch64_uimm12_shift (-INTVAL (op))")))

;; A constant that fits a 12-bit ADD or SUB immediate.
(define_predicate "aarch64_plus_immediate"
  (and (match_code "const_int")
       (ior (match_test "aarch64_uimm12_shift (INTVAL (op))")
	    (match_test "aarch64_uimm12_shift (-INTVAL (op))"))))

(define_predicate "aarch64_plus_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_plus_immediate")))

(define_predicate "aarch64_plushi_immediate"
  (match_code "const_int")
{
  HOST_WIDE_INT val = INTVAL (op);
  /* The HImode value must be zero-extendable to an SImode plus_operand.  */
  return ((val & 0xfff) == val || sext_hwi (val & 0xf000, 16) == val);
})

(define_predicate "aarch64_plushi_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_plushi_immediate")))

;; An addition constant expressible in at most two ADD/SUB immediates.
(define_predicate "aarch64_pluslong_immediate"
  (and (match_code "const_int")
       (match_test "(INTVAL (op) < 0xffffff && INTVAL (op) > -0xffffff)")))

;; As above, but excluding constants a single ADD/SUB can handle.
;; (Note: "immedate" spelling is the established predicate name.)
(define_predicate "aarch64_pluslong_strict_immedate"
  (and (match_operand 0 "aarch64_pluslong_immediate")
       (not (match_operand 0 "aarch64_plus_immediate"))))
;; A poly_int constant usable with SVE scalar INC/DEC.
(define_predicate "aarch64_sve_scalar_inc_dec_immediate"
  (and (match_code "const_poly_int")
       (match_test "aarch64_sve_scalar_inc_dec_immediate_p (op)")))

;; A poly_int constant usable with SVE ADDVL/ADDPL.
(define_predicate "aarch64_sve_addvl_addpl_immediate"
  (and (match_code "const_poly_int")
       (match_test "aarch64_sve_addvl_addpl_immediate_p (op)")))

(define_predicate "aarch64_sve_plus_immediate"
  (ior (match_operand 0 "aarch64_sve_scalar_inc_dec_immediate")
       (match_operand 0 "aarch64_sve_addvl_addpl_immediate")))

;; An offset whose addition needs exactly one temporary register.
(define_predicate "aarch64_split_add_offset_immediate"
  (and (match_code "const_poly_int")
       (match_test "aarch64_add_offset_temporaries (op) == 1")))

(define_predicate "aarch64_pluslong_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_pluslong_immediate")
       (and (match_test "TARGET_SVE")
	    (match_operand 0 "aarch64_sve_plus_immediate"))))

(define_predicate "aarch64_pluslong_or_poly_operand"
  (ior (match_operand 0 "aarch64_pluslong_operand")
       (match_operand 0 "aarch64_split_add_offset_immediate")))

;; A constant encodable as a logical-instruction bitmask immediate.
(define_predicate "aarch64_logical_immediate"
  (and (match_code "const_int")
       (match_test "aarch64_bitmask_imm (INTVAL (op), mode)")))
(define_predicate "aarch64_logical_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_logical_immediate")))

;; A constant that a MOV-immediate sequence can materialize in MODE.
(define_predicate "aarch64_mov_imm_operand"
  (and (match_code "const_int")
       (match_test "aarch64_move_imm (INTVAL (op), mode)")))

;; A constant usable as (or splittable for) an AND bitmask immediate.
(define_predicate "aarch64_logical_and_immediate"
  (and (match_code "const_int")
       (match_test "aarch64_and_bitmask_imm (INTVAL (op), mode)")))

;; Shift amounts for 32-bit shifts: 0..31.
(define_predicate "aarch64_shift_imm_si"
  (and (match_code "const_int")
       (match_test "(unsigned HOST_WIDE_INT) INTVAL (op) < 32")))

;; Shift amounts for 64-bit shifts: 0..63.
(define_predicate "aarch64_shift_imm_di"
  (and (match_code "const_int")
       (match_test "(unsigned HOST_WIDE_INT) INTVAL (op) < 64")))

;; As above, but also allowing a shift of exactly 64.
(define_predicate "aarch64_shift_imm64_di"
  (and (match_code "const_int")
       (match_test "(unsigned HOST_WIDE_INT) INTVAL (op) <= 64")))

(define_predicate "aarch64_reg_or_shift_imm_si"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_shift_imm_si")))

(define_predicate "aarch64_reg_or_shift_imm_di"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_shift_imm_di")))
;; The imm3 field is a 3-bit field that only accepts immediates in the
;; range 0..4.
(define_predicate "aarch64_imm3"
  (and (match_code "const_int")
       (match_test "(unsigned HOST_WIDE_INT) INTVAL (op) <= 4")))

;; The imm2 field is a 2-bit field that only accepts immediates in the
;; range 0..3.
(define_predicate "aarch64_imm2"
  (and (match_code "const_int")
       (match_test "UINTVAL (op) <= 3")))

;; The imm3 field is a 3-bit field that only accepts immediates in the
;; range 0..7.
(define_predicate "aarch64_lane_imm3"
  (and (match_code "const_int")
       (match_test "UINTVAL (op) <= 7")))

;; An immediate that fits into 24 bits.
(define_predicate "aarch64_imm24"
  (and (match_code "const_int")
       (match_test "IN_RANGE (UINTVAL (op), 0, 0xffffff)")))
;; A nonzero power of 2 no larger than 2**4.
(define_predicate "aarch64_pwr_imm3"
  (and (match_code "const_int")
       (match_test "INTVAL (op) != 0
		    && (unsigned) exact_log2 (INTVAL (op)) <= 4")))

;; A nonzero power of 2 representable in 32 bits.
(define_predicate "aarch64_pwr_2_si"
  (and (match_code "const_int")
       (match_test "INTVAL (op) != 0
		    && (unsigned) exact_log2 (INTVAL (op)) < 32")))

;; A nonzero power of 2 representable in 64 bits.
(define_predicate "aarch64_pwr_2_di"
  (and (match_code "const_int")
       (match_test "INTVAL (op) != 0
		    && (unsigned) exact_log2 (INTVAL (op)) < 64")))

;; An offset valid for a load/store-pair instruction in MODE.
(define_predicate "aarch64_mem_pair_offset"
  (and (match_code "const_int")
       (match_test "aarch64_offset_7bit_signed_scaled_p (mode, INTVAL (op))")))
;; Memory whose address is valid for an LDP/STP access.
(define_predicate "aarch64_mem_pair_operand"
  (and (match_code "mem")
       (match_test "aarch64_legitimate_address_p (mode, XEXP (op, 0), false,
						  ADDR_QUERY_LDP_STP)")))

;; Used for storing two 64-bit values in an AdvSIMD register using an STP
;; as a 128-bit vec_concat.
(define_predicate "aarch64_mem_pair_lanes_operand"
  (and (match_code "mem")
       (match_test "aarch64_legitimate_address_p (GET_MODE (op), XEXP (op, 0),
						  false,
						  ADDR_QUERY_LDP_STP_N)")))
;; An address valid for PRFM (prefetch) instructions.
(define_predicate "aarch64_prefetch_operand"
  (match_test "aarch64_address_valid_for_prefetch_p (op, false)"))

;; A symbolic expression that does not have to be forced to memory.
(define_predicate "aarch64_valid_symref"
  (match_code "const, symbol_ref, label_ref")
{
  return (aarch64_classify_symbolic_expression (op)
	  != SYMBOL_FORCE_TO_MEM);
})
;; A symbol (possibly plus a constant offset) whose TLS access model is
;; initial-exec.
(define_predicate "aarch64_tls_ie_symref"
  (match_code "const, symbol_ref, label_ref")
{
  switch (GET_CODE (op))
    {
    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) != PLUS
	  || GET_CODE (XEXP (op, 0)) != SYMBOL_REF
	  || GET_CODE (XEXP (op, 1)) != CONST_INT)
	return false;
      op = XEXP (op, 0);
      /* FALLTHRU */

    case SYMBOL_REF:
      return SYMBOL_REF_TLS_MODEL (op) == TLS_MODEL_INITIAL_EXEC;

    default:
      gcc_unreachable ();
    }
})
;; A symbol (possibly plus a constant offset) whose TLS access model is
;; local-exec.
(define_predicate "aarch64_tls_le_symref"
  (match_code "const, symbol_ref, label_ref")
{
  switch (GET_CODE (op))
    {
    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) != PLUS
	  || GET_CODE (XEXP (op, 0)) != SYMBOL_REF
	  || GET_CODE (XEXP (op, 1)) != CONST_INT)
	return false;
      op = XEXP (op, 0);
      /* FALLTHRU */

    case SYMBOL_REF:
      return SYMBOL_REF_TLS_MODEL (op) == TLS_MODEL_LOCAL_EXEC;

    default:
      gcc_unreachable ();
    }
})
;; Anything a general move pattern can accept: registers, memory, or a
;; constant that aarch64_mov_operand_p accepts for MODE.
(define_predicate "aarch64_mov_operand"
  (and (match_code "reg,subreg,mem,const,const_int,symbol_ref,label_ref,high,
		    const_poly_int,const_vector")
       (ior (match_operand 0 "register_operand")
	    (ior (match_operand 0 "memory_operand")
		 (match_test "aarch64_mov_operand_p (op, mode)")))))

;; As aarch64_mov_operand, but excluding memory.
(define_predicate "aarch64_nonmemory_operand"
  (and (match_code "reg,subreg,const,const_int,symbol_ref,label_ref,high,
		    const_poly_int,const_vector")
       (ior (match_operand 0 "register_operand")
	    (match_test "aarch64_mov_operand_p (op, mode)"))))

;; Operands valid for a TImode move: registers, memory, or 128-bit
;; constants that can be materialized by a move sequence.
(define_predicate "aarch64_movti_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "memory_operand")
       (and (match_operand 0 "const_scalar_int_operand")
	    (match_test "aarch64_mov128_immediate (op)"))))
(define_predicate "aarch64_reg_or_imm"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const_scalar_int_operand")))

;; True for integer comparisons and for FP comparisons other than LTGT or UNEQ.
(define_special_predicate "aarch64_comparison_operator"
  (match_code "eq,ne,le,lt,ge,gt,geu,gtu,leu,ltu,unordered,
	       ordered,unlt,unle,unge,ungt"))

;; Same as aarch64_comparison_operator but don't ignore the mode.
;; RTL SET operations require their operands source and destination have
;; the same modes, so we can't ignore the modes there.  See PR target/69161.
(define_predicate "aarch64_comparison_operator_mode"
  (match_code "eq,ne,le,lt,ge,gt,geu,gtu,leu,ltu,unordered,
	       ordered,unlt,unle,unge,ungt"))

;; A comparison of the CC register against zero whose condition code the
;; target can actually encode.
(define_special_predicate "aarch64_comparison_operation"
  (match_code "eq,ne,le,lt,ge,gt,geu,gtu,leu,ltu,unordered,
	       ordered,unlt,unle,unge,ungt")
{
  if (XEXP (op, 1) != const0_rtx)
    return false;
  rtx op0 = XEXP (op, 0);
  if (!REG_P (op0) || REGNO (op0) != CC_REGNUM)
    return false;
  return aarch64_get_condition_code (op) >= 0;
})
(define_special_predicate "aarch64_equality_operator"
  (match_code "eq,ne"))

;; A comparison of the CC register against zero that reads the carry
;; flag; which code is valid depends on the CC mode in use.
(define_special_predicate "aarch64_carry_operation"
  (match_code "ltu,geu")
{
  if (XEXP (op, 1) != const0_rtx)
    return false;
  rtx op0 = XEXP (op, 0);
  if (!REG_P (op0) || REGNO (op0) != CC_REGNUM)
    return false;
  machine_mode ccmode = GET_MODE (op0);
  if (ccmode == CC_Cmode)
    return GET_CODE (op) == LTU;
  if (ccmode == CC_ADCmode || ccmode == CCmode)
    return GET_CODE (op) == GEU;
  return false;
})

; borrow is essentially the inverse of carry since the sense of the C flag
; is inverted during subtraction.  See the note in aarch64-modes.def.
(define_special_predicate "aarch64_borrow_operation"
  (match_code "geu,ltu")
{
  if (XEXP (op, 1) != const0_rtx)
    return false;
  rtx op0 = XEXP (op, 0);
  if (!REG_P (op0) || REGNO (op0) != CC_REGNUM)
    return false;
  machine_mode ccmode = GET_MODE (op0);
  if (ccmode == CC_Cmode)
    return GET_CODE (op) == GEU;
  if (ccmode == CC_ADCmode || ccmode == CCmode)
    return GET_CODE (op) == LTU;
  return false;
})
;; True if the operand is memory reference suitable for a load/store exclusive.
(define_predicate "aarch64_sync_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_code "reg" "0")))

;; Memory addressed by a DImode register, or register plus a constant
;; offset that fits the signed 9-bit unscaled form.
(define_predicate "aarch64_9bit_offset_memory_operand"
  (and (match_operand 0 "memory_operand")
       (ior (match_code "reg" "0")
	    (and (match_code "plus" "0")
		 (match_code "reg" "00")
		 (match_code "const_int" "01"))))
{
  rtx mem_op = XEXP (op, 0);

  if (REG_P (mem_op))
    return GET_MODE (mem_op) == DImode;

  rtx plus_op0 = XEXP (mem_op, 0);
  rtx plus_op1 = XEXP (mem_op, 1);

  if (GET_MODE (plus_op0) != DImode)
    return false;

  poly_int64 offset;
  if (!poly_int_rtx_p (plus_op1, &offset))
    gcc_unreachable ();

  return aarch64_offset_9bit_signed_unscaled_p (mode, offset);
})

;; RCpc 8.4 loads allow a 9-bit offset; otherwise fall back to the
;; exclusive-access addressing form.
(define_predicate "aarch64_rcpc_memory_operand"
  (if_then_else (match_test "AARCH64_ISA_RCPC8_4")
    (match_operand 0 "aarch64_9bit_offset_memory_operand")
    (match_operand 0 "aarch64_sync_memory_operand")))
;; Predicates for parallel expanders based on mode.
(define_special_predicate "vect_par_cnst_hi_half"
  (match_code "parallel")
{
  return aarch64_simd_check_vect_par_cnst_half (op, mode, true);
})

(define_special_predicate "vect_par_cnst_lo_half"
  (match_code "parallel")
{
  return aarch64_simd_check_vect_par_cnst_half (op, mode, false);
})

;; A parallel of integers decreasing by 1 at each step.
(define_predicate "descending_int_parallel"
  (match_code "parallel")
{
  return aarch64_stepped_int_parallel_p (op, -1);
})

;; A parallel of integers increasing by 1 at each step.
(define_predicate "ascending_int_parallel"
  (match_code "parallel")
{
  return aarch64_stepped_int_parallel_p (op, 1);
})
;; A constant valid as a SIMD left-shift immediate for MODE.
(define_special_predicate "aarch64_simd_lshift_imm"
  (match_code "const,const_vector")
{
  return aarch64_simd_shift_imm_p (op, mode, true);
})

;; A constant valid as a SIMD right-shift immediate for MODE.
(define_special_predicate "aarch64_simd_rshift_imm"
  (match_code "const,const_vector")
{
  return aarch64_simd_shift_imm_p (op, mode, false);
})

;; The all-zeros vector constant.
(define_predicate "aarch64_simd_imm_zero"
  (and (match_code "const,const_vector")
       (match_test "op == CONST0_RTX (GET_MODE (op))")))

;; The all-ones-per-element vector constant.
(define_predicate "aarch64_simd_imm_one"
  (and (match_code "const_vector")
       (match_test "op == CONST1_RTX (GET_MODE (op))")))

;; Zero, whether scalar or vector.
(define_predicate "aarch64_simd_or_scalar_imm_zero"
  (and (match_code "const_int,const_double,const,const_vector")
       (match_test "op == CONST0_RTX (GET_MODE (op))")))

;; The vector constant with every element -1.
(define_predicate "aarch64_simd_imm_minus_one"
  (and (match_code "const,const_vector")
       (match_test "op == CONSTM1_RTX (GET_MODE (op))")))

;; A register or any form of zero constant.
(define_predicate "aarch64_simd_reg_or_zero"
  (and (match_code "reg,subreg,const_int,const_double,const,const_vector")
       (ior (match_operand 0 "register_operand")
	    (match_test "op == const0_rtx")
	    (match_operand 0 "aarch64_simd_or_scalar_imm_zero"))))
(define_predicate "aarch64_simd_reg_or_minus_one"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_simd_imm_minus_one")))

;; Memory valid for SIMD structure (LD[1-4]/ST[1-4]) access.
(define_predicate "aarch64_simd_struct_operand"
  (and (match_code "mem")
       (match_test "TARGET_SIMD && aarch64_simd_mem_operand_p (op)")))

;; Like general_operand but allow only valid SIMD addressing modes.
(define_predicate "aarch64_simd_general_operand"
  (and (match_operand 0 "general_operand")
       (match_test "!MEM_P (op)
		    || GET_CODE (XEXP (op, 0)) == POST_INC
		    || GET_CODE (XEXP (op, 0)) == REG")))
;; Like nonimmediate_operand but allow only valid SIMD addressing modes.
(define_predicate "aarch64_simd_nonimmediate_operand"
  (and (match_operand 0 "nonimmediate_operand")
       (match_test "!MEM_P (op)
		    || GET_CODE (XEXP (op, 0)) == POST_INC
		    || GET_CODE (XEXP (op, 0)) == REG")))

;; Predicates used by the various SIMD shift operations.  These
;; fall in to 3 categories.
;;   Shifts with a range 0-(bit_size - 1) (aarch64_simd_shift_imm)
;;   Shifts with a range 1-bit_size (aarch64_simd_shift_imm_offset)
;;   Shifts with a range 0-bit_size (aarch64_simd_shift_imm_bitsize)
(define_predicate "aarch64_simd_shift_imm_qi"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

(define_predicate "aarch64_simd_shift_imm_hi"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

(define_predicate "aarch64_simd_shift_imm_si"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

(define_predicate "aarch64_simd_shift_imm_di"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

(define_predicate "aarch64_simd_shift_imm_offset_qi"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 8)")))

(define_predicate "aarch64_simd_shift_imm_offset_hi"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 16)")))

(define_predicate "aarch64_simd_shift_imm_offset_si"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 32)")))

(define_predicate "aarch64_simd_shift_imm_offset_di"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 64)")))

(define_predicate "aarch64_simd_shift_imm_bitsize_qi"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 8)")))

(define_predicate "aarch64_simd_shift_imm_bitsize_hi"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 16)")))

(define_predicate "aarch64_simd_shift_imm_bitsize_si"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 32)")))

(define_predicate "aarch64_simd_shift_imm_bitsize_di"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 64)")))
;; A symbol that refers into the constant pool.
(define_predicate "aarch64_constant_pool_symref"
  (and (match_code "symbol_ref")
       (match_test "CONSTANT_POOL_ADDRESS_P (op)")))

(define_predicate "aarch64_constant_vector_operand"
  (match_code "const,const_vector"))

;; Memory valid for the SVE LD1R (load-and-replicate) instruction.
(define_predicate "aarch64_sve_ld1r_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "aarch64_sve_ld1r_operand_p (op)")))
;; Memory valid for the SVE LD1RQ instruction.
(define_predicate "aarch64_sve_ld1rq_operand"
  (and (match_code "mem")
       (match_test "aarch64_sve_ld1rq_operand_p (op)")))

;; Memory valid for SVE LD1RO with byte elements.
(define_predicate "aarch64_sve_ld1ro_operand_b"
  (and (match_code "mem")
       (match_test "aarch64_sve_ld1ro_operand_p (op, QImode)")))

;; Memory valid for SVE LD1RO with halfword elements.
(define_predicate "aarch64_sve_ld1ro_operand_h"
  (and (match_code "mem")
       (match_test "aarch64_sve_ld1ro_operand_p (op, HImode)")))

;; Memory valid for SVE LD1RO with word elements.
(define_predicate "aarch64_sve_ld1ro_operand_w"
  (and (match_code "mem")
       (match_test "aarch64_sve_ld1ro_operand_p (op, SImode)")))

;; Memory valid for SVE LD1RO with doubleword elements.
(define_predicate "aarch64_sve_ld1ro_operand_d"
  (and (match_code "mem")
       (match_test "aarch64_sve_ld1ro_operand_p (op, DImode)")))

;; Memory valid for SVE first-faulting loads (LDFF1).
(define_predicate "aarch64_sve_ldff1_operand"
  (and (match_code "mem")
       (match_test "aarch64_sve_ldff1_operand_p (op)")))

;; Memory valid for SVE non-faulting loads (LDNF1).
(define_predicate "aarch64_sve_ldnf1_operand"
  (and (match_code "mem")
       (match_test "aarch64_sve_ldnf1_operand_p (op)")))
;; Like memory_operand, but restricted to addresses that are valid for
;; SVE LDR and STR instructions.
(define_predicate "aarch64_sve_ldr_operand"
  (and (match_code "mem")
       (match_test "aarch64_sve_ldr_operand_p (op)")))

;; An address valid for SVE PRF* (prefetch) instructions.
(define_special_predicate "aarch64_sve_prefetch_operand"
  (and (match_code "reg, plus")
       (match_test "aarch64_sve_prefetch_operand_p (op, mode)")))

(define_predicate "aarch64_sve_nonimmediate_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_ldr_operand")))

(define_predicate "aarch64_sve_general_operand"
  (and (match_code "reg,subreg,mem,const,const_vector")
       (ior (match_operand 0 "register_operand")
	    (match_operand 0 "aarch64_sve_ldr_operand")
	    (match_test "aarch64_mov_operand_p (op, mode)"))))

;; Memory valid for SVE structure loads and stores.
(define_predicate "aarch64_sve_struct_memory_operand"
  (and (match_code "mem")
       (match_test "aarch64_sve_struct_memory_operand_p (op)")))

(define_predicate "aarch64_sve_struct_nonimmediate_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_struct_memory_operand")))

;; Doesn't include immediates, since those are handled by the move
;; patterns instead.
(define_predicate "aarch64_sve_dup_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_ld1r_operand")))
;; A constant that can be generated by an SVE PTRUE with some svpattern.
(define_predicate "aarch64_sve_ptrue_svpattern_immediate"
  (and (match_code "const")
       (match_test "aarch64_sve_ptrue_svpattern_p (op, NULL)")))

;; A vector immediate valid for SVE ADD.
(define_predicate "aarch64_sve_arith_immediate"
  (and (match_code "const,const_vector")
       (match_test "aarch64_sve_arith_immediate_p (mode, op, false)")))

;; A vector immediate valid for SVE SUB (i.e. negatable into an ADD).
(define_predicate "aarch64_sve_sub_arith_immediate"
  (and (match_code "const,const_vector")
       (match_test "aarch64_sve_arith_immediate_p (mode, op, true)")))

;; A vector immediate valid for SVE SQADD/UQADD.
(define_predicate "aarch64_sve_qadd_immediate"
  (and (match_code "const,const_vector")
       (match_test "aarch64_sve_sqadd_sqsub_immediate_p (mode, op, false)")))

;; A vector immediate valid for SVE SQSUB/UQSUB.
(define_predicate "aarch64_sve_qsub_immediate"
  (and (match_code "const,const_vector")
       (match_test "aarch64_sve_sqadd_sqsub_immediate_p (mode, op, true)")))

;; A vector immediate valid for SVE vector INC/DEC.
(define_predicate "aarch64_sve_vector_inc_dec_immediate"
  (and (match_code "const,const_vector")
       (match_test "aarch64_sve_vector_inc_dec_immediate_p (op)")))
;; Gather/scatter offsets for byte elements: 0..31.
(define_predicate "aarch64_sve_gather_immediate_b"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Gather/scatter offsets for halfword elements: even values 0..62.
(define_predicate "aarch64_sve_gather_immediate_h"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 62)")
       (match_test "(INTVAL (op) & 1) == 0")))

;; Gather/scatter offsets for word elements: multiples of 4 in 0..124.
(define_predicate "aarch64_sve_gather_immediate_w"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 124)")
       (match_test "(INTVAL (op) & 3) == 0")))

;; Gather/scatter offsets for doubleword elements: multiples of 8 in 0..248.
(define_predicate "aarch64_sve_gather_immediate_d"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 248)")
       (match_test "(INTVAL (op) & 7) == 0")))
;; The 0xff mask, for elements wider than 8 bits (acts as UXTB).
(define_predicate "aarch64_sve_uxtb_immediate"
  (and (match_code "const_vector")
       (match_test "GET_MODE_UNIT_BITSIZE (GET_MODE (op)) > 8")
       (match_test "aarch64_const_vec_all_same_int_p (op, 0xff)")))

;; The 0xffff mask, for elements wider than 16 bits (acts as UXTH).
(define_predicate "aarch64_sve_uxth_immediate"
  (and (match_code "const_vector")
       (match_test "GET_MODE_UNIT_BITSIZE (GET_MODE (op)) > 16")
       (match_test "aarch64_const_vec_all_same_int_p (op, 0xffff)")))

;; The 0xffffffff mask, for elements wider than 32 bits (acts as UXTW).
(define_predicate "aarch64_sve_uxtw_immediate"
  (and (match_code "const_vector")
       (match_test "GET_MODE_UNIT_BITSIZE (GET_MODE (op)) > 32")
       (match_test "aarch64_const_vec_all_same_int_p (op, 0xffffffff)")))

(define_predicate "aarch64_sve_uxt_immediate"
  (ior (match_operand 0 "aarch64_sve_uxtb_immediate")
       (match_operand 0 "aarch64_sve_uxth_immediate")
       (match_operand 0 "aarch64_sve_uxtw_immediate")))
;; A vector constant usable as an SVE logical bitmask immediate.
(define_predicate "aarch64_sve_logical_immediate"
  (and (match_code "const,const_vector")
       (match_test "aarch64_sve_bitmask_immediate_p (op)")))

;; Used for SVE UMAX and UMIN.
(define_predicate "aarch64_sve_vsb_immediate"
  (and (match_code "const_vector")
       (match_test "GET_MODE_INNER (GET_MODE (op)) == QImode
		    ? aarch64_const_vec_all_same_in_range_p (op, -128, 127)
		    : aarch64_const_vec_all_same_in_range_p (op, 0, 255)")))

;; Used for SVE MUL, SMAX and SMIN.
(define_predicate "aarch64_sve_vsm_immediate"
  (and (match_code "const,const_vector")
       (match_test "aarch64_const_vec_all_same_in_range_p (op, -128, 127)")))

;; Constants that SVE DUP/FDUP can load directly.
(define_predicate "aarch64_sve_dup_immediate"
  (and (match_code "const,const_vector")
       (ior (match_test "aarch64_sve_dup_immediate_p (op)")
	    (match_test "aarch64_float_const_representable_p (op)"))))
;; Immediates for signed SVE comparisons.
(define_predicate "aarch64_sve_cmp_vsc_immediate"
  (and (match_code "const_int,const_vector")
       (match_test "aarch64_sve_cmp_immediate_p (op, true)")))

;; Immediates for unsigned SVE comparisons.
(define_predicate "aarch64_sve_cmp_vsd_immediate"
  (and (match_code "const_int,const_vector")
       (match_test "aarch64_sve_cmp_immediate_p (op, false)")))

;; A constant usable as the step of an SVE INDEX instruction.
(define_predicate "aarch64_sve_index_immediate"
  (and (match_code "const_int")
       (match_test "aarch64_sve_index_immediate_p (op)")))

;; A vector FP immediate valid for SVE FADD.
(define_predicate "aarch64_sve_float_arith_immediate"
  (and (match_code "const,const_vector")
       (match_test "aarch64_sve_float_arith_immediate_p (op, false)")))
;; A vector FP immediate whose negation is valid for SVE FADD (i.e. FSUB).
(define_predicate "aarch64_sve_float_negated_arith_immediate"
  (and (match_code "const,const_vector")
       (match_test "aarch64_sve_float_arith_immediate_p (op, true)")))

(define_predicate "aarch64_sve_float_arith_with_sub_immediate"
  (ior (match_operand 0 "aarch64_sve_float_arith_immediate")
       (match_operand 0 "aarch64_sve_float_negated_arith_immediate")))

;; A vector FP immediate valid for SVE FMUL.
(define_predicate "aarch64_sve_float_mul_immediate"
  (and (match_code "const,const_vector")
       (match_test "aarch64_sve_float_mul_immediate_p (op)")))

;; FP 0.0 or 1.0, the immediates accepted by SVE FMAX/FMIN.
(define_predicate "aarch64_sve_float_maxmin_immediate"
  (and (match_code "const_vector")
       (ior (match_test "op == CONST0_RTX (GET_MODE (op))")
	    (match_test "op == CONST1_RTX (GET_MODE (op))"))))
766 (define_predicate "aarch64_sve_arith_operand"
|
|
767 (ior (match_operand 0 "register_operand")
|
|
768 (match_operand 0 "aarch64_sve_arith_immediate")))
|
|
769
|
|
770 (define_predicate "aarch64_sve_add_operand"
|
|
771 (ior (match_operand 0 "aarch64_sve_arith_operand")
|
|
772 (match_operand 0 "aarch64_sve_sub_arith_immediate")
|
145
|
773 (match_operand 0 "aarch64_sve_vector_inc_dec_immediate")))
|
|
774
|
|
;; An operand for SVE saturating add: a register, a saturating-add
;; immediate, or a saturating-subtract immediate (so SQADD of a negative
;; constant can be implemented as SQSUB and vice versa).
(define_predicate "aarch64_sve_sqadd_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_qadd_immediate")
       (match_operand 0 "aarch64_sve_qsub_immediate")))
|
|
779
|
|
;; A register, or an immediate accepted by aarch64_sve_uxt_immediate --
;; an AND mask of this form can be implemented as a UXT[BHW] extension.
(define_predicate "aarch64_sve_pred_and_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_uxt_immediate")))
|
131
|
783
|
|
;; A register or an immediate valid for SVE logical (AND/ORR/EOR)
;; instructions.
(define_predicate "aarch64_sve_logical_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_logical_immediate")))
|
|
787
|
145
|
;; A gather/scatter offset for byte-sized accesses: a register or an
;; immediate in the byte-access range (aarch64_sve_gather_immediate_b).
(define_predicate "aarch64_sve_gather_offset_b"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_gather_immediate_b")))
|
|
791
|
|
;; As aarch64_sve_gather_offset_b, but for halfword-sized accesses.
(define_predicate "aarch64_sve_gather_offset_h"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_gather_immediate_h")))
|
|
795
|
|
;; As aarch64_sve_gather_offset_b, but for word-sized accesses.
(define_predicate "aarch64_sve_gather_offset_w"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_gather_immediate_w")))
|
|
799
|
|
;; As aarch64_sve_gather_offset_b, but for doubleword-sized accesses.
(define_predicate "aarch64_sve_gather_offset_d"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_gather_immediate_d")))
|
|
803
|
131
|
;; A shift amount for an SVE left shift: a register or an immediate
;; in the left-shift range (aarch64_simd_lshift_imm).
(define_predicate "aarch64_sve_lshift_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_simd_lshift_imm")))
|
|
807
|
|
;; A shift amount for an SVE right shift: a register or an immediate
;; in the right-shift range (aarch64_simd_rshift_imm).
(define_predicate "aarch64_sve_rshift_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_simd_rshift_imm")))
|
|
811
|
145
|
;; A register or a "vsb"-class immediate (aarch64_sve_vsb_immediate);
;; the exact immediate range is defined by that predicate.
(define_predicate "aarch64_sve_vsb_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_vsb_immediate")))
|
|
815
|
|
;; A register or a "vsm"-class immediate (aarch64_sve_vsm_immediate);
;; the exact immediate range is defined by that predicate.
(define_predicate "aarch64_sve_vsm_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_vsm_immediate")))
|
|
819
|
|
;; A register, or a constant that can be loaded with an SVE DUP
;; (aarch64_sve_dup_immediate).
(define_predicate "aarch64_sve_reg_or_dup_imm"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_dup_immediate")))
|
131
|
823
|
|
;; A register or a "vsc"-class compare immediate
;; (aarch64_sve_cmp_vsc_immediate).
(define_predicate "aarch64_sve_cmp_vsc_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_cmp_vsc_immediate")))
|
|
827
|
|
;; A register or a "vsd"-class compare immediate
;; (aarch64_sve_cmp_vsd_immediate).
(define_predicate "aarch64_sve_cmp_vsd_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_cmp_vsd_immediate")))
|
|
831
|
|
;; A register or a valid INDEX base/step immediate
;; (aarch64_sve_index_immediate).
(define_predicate "aarch64_sve_index_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_index_immediate")))
|
|
835
|
|
;; A register or a direct SVE FP arithmetic immediate
;; (aarch64_sve_float_arith_immediate).
(define_predicate "aarch64_sve_float_arith_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_float_arith_immediate")))
|
|
839
|
|
;; A register, or an FP constant valid either directly or when negated
;; (aarch64_sve_float_arith_with_sub_immediate).
(define_predicate "aarch64_sve_float_arith_with_sub_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_float_arith_with_sub_immediate")))
|
|
843
|
|
;; A register or a valid SVE FMUL immediate
;; (aarch64_sve_float_mul_immediate).
(define_predicate "aarch64_sve_float_mul_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_float_mul_immediate")))
|
|
847
|
145
|
;; A register or one of the two FP max/min immediates (the 0.0 / 1.0
;; constant vectors accepted by aarch64_sve_float_maxmin_immediate).
(define_predicate "aarch64_sve_float_maxmin_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_sve_float_maxmin_immediate")))
|
|
851
|
131
|
;; A permute-selector operand: a register or any constant vector
;; (aarch64_constant_vector_operand).
(define_predicate "aarch64_sve_vec_perm_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "aarch64_constant_vector_operand")))
|
|
855
|
145
|
;; A CONST_INT flag recording whether a governing predicate is known at
;; compile time to be all-true: either SVE_MAYBE_NOT_PTRUE or
;; SVE_KNOWN_PTRUE.
(define_predicate "aarch64_sve_ptrue_flag"
  (and (match_code "const_int")
       (ior (match_test "INTVAL (op) == SVE_MAYBE_NOT_PTRUE")
	    (match_test "INTVAL (op) == SVE_KNOWN_PTRUE"))))
|
|
860
|
|
;; A CONST_INT selecting how strictly an instruction must honor its
;; governing predicate: either SVE_RELAXED_GP or SVE_STRICT_GP.
(define_predicate "aarch64_sve_gp_strictness"
  (and (match_code "const_int")
       (ior (match_test "INTVAL (op) == SVE_RELAXED_GP")
	    (match_test "INTVAL (op) == SVE_STRICT_GP"))))
|
|
865
|
|
;; The offset scale for gathers/scatters of byte elements: only 1
;; (no scaling) is valid.
(define_predicate "aarch64_gather_scale_operand_b"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 1")))
|
|
869
|
|
;; The offset scale for gathers/scatters of halfword elements:
;; 1 (unscaled) or 2 (the element size).
(define_predicate "aarch64_gather_scale_operand_h"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 1 || INTVAL (op) == 2")))
|
|
873
|
131
|
;; The offset scale for gathers/scatters of word elements:
;; 1 (unscaled) or 4 (the element size).
(define_predicate "aarch64_gather_scale_operand_w"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 1 || INTVAL (op) == 4")))
|
|
877
|
|
;; The offset scale for gathers/scatters of doubleword elements:
;; 1 (unscaled) or 8 (the element size).
(define_predicate "aarch64_gather_scale_operand_d"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 1 || INTVAL (op) == 8")))
|
|
881
|
|
;; A special predicate that doesn't match a particular mode.
;; Accepts any REG rtx regardless of its mode (define_special_predicate
;; bypasses the usual mode check).
(define_special_predicate "aarch64_any_register_operand"
  (match_code "reg"))
|
|
885
|
|
;; Any rtx code in the listed set of binary integer arithmetic and
;; logical operations.
(define_predicate "aarch64_sve_any_binary_operator"
  (match_code "plus,minus,mult,div,udiv,smax,umax,smin,umin,and,ior,xor"))
|
145
|
888
|
|
;; A (possibly polynomial) integer constant whose value equals
;; BYTES_PER_SVE_VECTOR, i.e. the number of bytes in one SVE vector.
(define_predicate "aarch64_bytes_per_sve_vector_operand"
  (and (match_code "const_int,const_poly_int")
       (match_test "known_eq (wi::to_poly_wide (op, mode),
			      BYTES_PER_SVE_VECTOR)")))
|
|
893
|
|
;; A CONST_INT in [0, 15] -- the 4-bit tag offset used by the
;; memory-tagging (memtag) instructions.
(define_predicate "aarch64_memtag_tag_offset"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
|
|
897
|
|
;; An unsigned multiple of 16 in [0, 1008]: a 6-bit unsigned immediate
;; scaled by the 16-byte granule (low four bits must be clear).
(define_predicate "aarch64_granule16_uimm6"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 1008)
		    && !(INTVAL (op) & 0xf)")))
|
|
902
|
|
;; A signed multiple of 16 in [-4096, 4080]: a 9-bit signed immediate
;; scaled by the 16-byte granule (low four bits must be clear).
(define_predicate "aarch64_granule16_simm9"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), -4096, 4080)
		    && !(INTVAL (op) & 0xf)")))
|