comparison gcc/config/i386/predicates.md @ 0:a06113de4d67

first commit
author kent <kent@cr.ie.u-ryukyu.ac.jp>
date Fri, 17 Jul 2009 14:47:48 +0900
parents
children 3bfb6c00c1e0
1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 ;; Free Software Foundation, Inc.
4 ;;
5 ;; This file is part of GCC.
6 ;;
7 ;; GCC is free software; you can redistribute it and/or modify
8 ;; it under the terms of the GNU General Public License as published by
9 ;; the Free Software Foundation; either version 3, or (at your option)
10 ;; any later version.
11 ;;
12 ;; GCC is distributed in the hope that it will be useful,
13 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
14 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 ;; GNU General Public License for more details.
16 ;;
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
20
21 ;; Return nonzero if OP is either an i387 or an SSE fp register.
22 (define_predicate "any_fp_register_operand"
23 (and (match_code "reg")
24 (match_test "ANY_FP_REGNO_P (REGNO (op))")))
25
26 ;; Return nonzero if OP is an i387 fp register.
27 (define_predicate "fp_register_operand"
28 (and (match_code "reg")
29 (match_test "FP_REGNO_P (REGNO (op))")))
30
31 ;; Return nonzero if OP is a non-fp register_operand.
32 (define_predicate "register_and_not_any_fp_reg_operand"
33 (and (match_code "reg")
34 (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))
35
36 ;; Return nonzero if OP is a register operand other than an i387 fp register.
37 (define_predicate "register_and_not_fp_reg_operand"
38 (and (match_code "reg")
39 (not (match_test "FP_REGNO_P (REGNO (op))"))))
40
41 ;; True if the operand is an MMX register.
42 (define_predicate "mmx_reg_operand"
43 (and (match_code "reg")
44 (match_test "MMX_REGNO_P (REGNO (op))")))
45
46 ;; True if the operand is a Q_REGS class register.
47 (define_predicate "q_regs_operand"
48 (match_operand 0 "register_operand")
49 {
50 if (GET_CODE (op) == SUBREG)
51 op = SUBREG_REG (op);
52 return ANY_QI_REG_P (op);
53 })
54
55 ;; Match an SImode or HImode (or, on 64-bit targets, DImode) register for a zero_extract.
56 (define_special_predicate "ext_register_operand"
57 (match_operand 0 "register_operand")
58 {
59 if ((!TARGET_64BIT || GET_MODE (op) != DImode)
60 && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
61 return 0;
62 if (GET_CODE (op) == SUBREG)
63 op = SUBREG_REG (op);
64
65 /* Be careful to accept only registers having upper parts. */
66 return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) < 4;
67 })
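;; Illustrative note: only hard registers 0-3 (ax, dx, cx, bx) have high-byte
;; parts such as %ah, so e.g. (reg:SI 0) (%eax) is accepted while (reg:SI 4)
;; (%esi) is not; pseudo registers (REGNO > LAST_VIRTUAL_REGISTER) are also
;; accepted, since the allocator can still choose one of those four for them.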
68
69 ;; Return true if op is the AX register.
70 (define_predicate "ax_reg_operand"
71 (and (match_code "reg")
72 (match_test "REGNO (op) == 0")))
73
74 ;; Return true if op is the flags register.
75 (define_predicate "flags_reg_operand"
76 (and (match_code "reg")
77 (match_test "REGNO (op) == FLAGS_REG")))
78
79 ;; Return true if op is a register operand other than xmm0.
80 (define_predicate "reg_not_xmm0_operand"
81 (and (match_operand 0 "register_operand")
82 (match_test "GET_CODE (op) != REG
83 || REGNO (op) != FIRST_SSE_REG")))
84
85 ;; As above, but allow nonimmediate operands.
86 (define_predicate "nonimm_not_xmm0_operand"
87 (and (match_operand 0 "nonimmediate_operand")
88 (match_test "GET_CODE (op) != REG
89 || REGNO (op) != FIRST_SSE_REG")))
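;; These two predicates keep explicit operands out of xmm0, e.g. for SSE4.1
;; instructions such as the blendv family that use xmm0 as an implicit
;; operand.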
90
91 ;; Return 1 if OP can be stored in a sign-extended immediate field.
92 (define_predicate "x86_64_immediate_operand"
93 (match_code "const_int,symbol_ref,label_ref,const")
94 {
95 if (!TARGET_64BIT)
96 return immediate_operand (op, mode);
97
98 switch (GET_CODE (op))
99 {
100 case CONST_INT:
101 /* CONST_DOUBLEs never match, since HOST_BITS_PER_WIDE_INT is known
102 to be at least 32, and thus all acceptable constants are
103 represented as CONST_INT. */
104 if (HOST_BITS_PER_WIDE_INT == 32)
105 return 1;
106 else
107 {
108 HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
109 return trunc_int_for_mode (val, SImode) == val;
110 }
111 break;
112
113 case SYMBOL_REF:
114 /* For certain code models, the symbolic references are known to fit.
115 In the CM_SMALL_PIC model we know it fits if it is local to the shared
116 library. Don't count TLS SYMBOL_REFs here, since they fit only when
117 wrapped in one of the UNSPECs handled below. */
118 /* TLS symbols are not constant. */
119 if (SYMBOL_REF_TLS_MODEL (op))
120 return false;
121 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
122 || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
123
124 case LABEL_REF:
125 /* For certain code models, the code is near as well. */
126 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
127 || ix86_cmodel == CM_KERNEL);
128
129 case CONST:
130 /* We may also accept symbolic references with an offset in certain
131 special cases. */
132 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
133 switch (XINT (XEXP (op, 0), 1))
134 {
135 case UNSPEC_GOTPCREL:
136 case UNSPEC_DTPOFF:
137 case UNSPEC_GOTNTPOFF:
138 case UNSPEC_NTPOFF:
139 return 1;
140 default:
141 break;
142 }
143
144 if (GET_CODE (XEXP (op, 0)) == PLUS)
145 {
146 rtx op1 = XEXP (XEXP (op, 0), 0);
147 rtx op2 = XEXP (XEXP (op, 0), 1);
148 HOST_WIDE_INT offset;
149
150 if (ix86_cmodel == CM_LARGE)
151 return 0;
152 if (!CONST_INT_P (op2))
153 return 0;
154 offset = trunc_int_for_mode (INTVAL (op2), DImode);
155 switch (GET_CODE (op1))
156 {
157 case SYMBOL_REF:
158 /* TLS symbols are not constant. */
159 if (SYMBOL_REF_TLS_MODEL (op1))
160 return 0;
161 /* For CM_SMALL, assume that the last object is 16MB below the
162 end of the 31-bit boundary. We may also accept fairly
163 large negative constants, knowing that all objects are
164 in the positive half of the address space. */
165 if ((ix86_cmodel == CM_SMALL
166 || (ix86_cmodel == CM_MEDIUM
167 && !SYMBOL_REF_FAR_ADDR_P (op1)))
168 && offset < 16*1024*1024
169 && trunc_int_for_mode (offset, SImode) == offset)
170 return 1;
171 /* For CM_KERNEL we know that all objects reside in the
172 negative half of the 32-bit address space. We may not
173 accept negative offsets, since they may be just out of
174 range, but we may accept fairly large positive ones. */
175 if (ix86_cmodel == CM_KERNEL
176 && offset > 0
177 && trunc_int_for_mode (offset, SImode) == offset)
178 return 1;
179 break;
180
181 case LABEL_REF:
182 /* These conditions are similar to SYMBOL_REF ones, just the
183 constraints for code models differ. */
184 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
185 && offset < 16*1024*1024
186 && trunc_int_for_mode (offset, SImode) == offset)
187 return 1;
188 if (ix86_cmodel == CM_KERNEL
189 && offset > 0
190 && trunc_int_for_mode (offset, SImode) == offset)
191 return 1;
192 break;
193
194 case UNSPEC:
195 switch (XINT (op1, 1))
196 {
197 case UNSPEC_DTPOFF:
198 case UNSPEC_NTPOFF:
199 if (offset > 0
200 && trunc_int_for_mode (offset, SImode) == offset)
201 return 1;
202 }
203 break;
204
205 default:
206 break;
207 }
208 }
209 break;
210
211 default:
212 gcc_unreachable ();
213 }
214
215 return 0;
216 })
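;; For illustration: with TARGET_64BIT this accepts a CONST_INT such as
;; 0x7fffffff or -0x80000000 (values that survive sign extension from 32
;; bits), while a value like 0x80000000 is rejected and has to be loaded
;; into a register first.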
217
218 ;; Return 1 if OP can be stored in a zero-extended immediate field.
219 (define_predicate "x86_64_zext_immediate_operand"
220 (match_code "const_double,const_int,symbol_ref,label_ref,const")
221 {
222 switch (GET_CODE (op))
223 {
224 case CONST_DOUBLE:
225 if (HOST_BITS_PER_WIDE_INT == 32)
226 return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
227 else
228 return 0;
229
230 case CONST_INT:
231 if (HOST_BITS_PER_WIDE_INT == 32)
232 return INTVAL (op) >= 0;
233 else
234 return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
235
236 case SYMBOL_REF:
237 /* For certain code models, the symbolic references are known to fit. */
238 /* TLS symbols are not constant. */
239 if (SYMBOL_REF_TLS_MODEL (op))
240 return false;
241 return (ix86_cmodel == CM_SMALL
242 || (ix86_cmodel == CM_MEDIUM
243 && !SYMBOL_REF_FAR_ADDR_P (op)));
244
245 case LABEL_REF:
246 /* For certain code models, the code is near as well. */
247 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
248
249 case CONST:
250 /* We may also accept symbolic references with an offset in certain
251 special cases. */
252 if (GET_CODE (XEXP (op, 0)) == PLUS)
253 {
254 rtx op1 = XEXP (XEXP (op, 0), 0);
255 rtx op2 = XEXP (XEXP (op, 0), 1);
256
257 if (ix86_cmodel == CM_LARGE)
258 return 0;
259 switch (GET_CODE (op1))
260 {
261 case SYMBOL_REF:
262 /* TLS symbols are not constant. */
263 if (SYMBOL_REF_TLS_MODEL (op1))
264 return 0;
265 /* For small code model we may accept pretty large positive
266 offsets, since one bit is available for free. Negative
267 offsets are limited by the size of NULL pointer area
268 specified by the ABI. */
269 if ((ix86_cmodel == CM_SMALL
270 || (ix86_cmodel == CM_MEDIUM
271 && !SYMBOL_REF_FAR_ADDR_P (op1)))
272 && CONST_INT_P (op2)
273 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
274 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
275 return 1;
276 /* ??? For the kernel, we may accept adjustment of
277 -0x10000000, since we know that it will just convert
278 negative address space to positive, but perhaps this
279 is not worthwhile. */
280 break;
281
282 case LABEL_REF:
283 /* These conditions are similar to SYMBOL_REF ones, just the
284 constraints for code models differ. */
285 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
286 && CONST_INT_P (op2)
287 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
288 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
289 return 1;
290 break;
291
292 default:
293 return 0;
294 }
295 }
296 break;
297
298 default:
299 gcc_unreachable ();
300 }
301 return 0;
302 })
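;; For illustration: (const_int 0xffffffff) is accepted here because it fits
;; when zero extended to 64 bits, even though x86_64_immediate_operand
;; rejects it; negative constants such as (const_int -1) are rejected.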
303
304 ;; Return nonzero if OP is general operand representable on x86_64.
305 (define_predicate "x86_64_general_operand"
306 (if_then_else (match_test "TARGET_64BIT")
307 (ior (match_operand 0 "nonimmediate_operand")
308 (match_operand 0 "x86_64_immediate_operand"))
309 (match_operand 0 "general_operand")))
310
311 ;; Return nonzero if OP is general operand representable on x86_64
312 ;; as either sign extended or zero extended constant.
313 (define_predicate "x86_64_szext_general_operand"
314 (if_then_else (match_test "TARGET_64BIT")
315 (ior (match_operand 0 "nonimmediate_operand")
316 (ior (match_operand 0 "x86_64_immediate_operand")
317 (match_operand 0 "x86_64_zext_immediate_operand")))
318 (match_operand 0 "general_operand")))
319
320 ;; Return nonzero if OP is nonmemory operand representable on x86_64.
321 (define_predicate "x86_64_nonmemory_operand"
322 (if_then_else (match_test "TARGET_64BIT")
323 (ior (match_operand 0 "register_operand")
324 (match_operand 0 "x86_64_immediate_operand"))
325 (match_operand 0 "nonmemory_operand")))
326
327 ;; Return nonzero if OP is nonmemory operand representable on x86_64 as either a sign- or zero-extended constant.
328 (define_predicate "x86_64_szext_nonmemory_operand"
329 (if_then_else (match_test "TARGET_64BIT")
330 (ior (match_operand 0 "register_operand")
331 (ior (match_operand 0 "x86_64_immediate_operand")
332 (match_operand 0 "x86_64_zext_immediate_operand")))
333 (match_operand 0 "nonmemory_operand")))
334
335 ;; Return true when operand is PIC expression that can be computed by lea
336 ;; operation.
337 (define_predicate "pic_32bit_operand"
338 (match_code "const,symbol_ref,label_ref")
339 {
340 if (!flag_pic)
341 return 0;
342 /* Rule out relocations that translate into 64bit constants. */
343 if (TARGET_64BIT && GET_CODE (op) == CONST)
344 {
345 op = XEXP (op, 0);
346 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
347 op = XEXP (op, 0);
348 if (GET_CODE (op) == UNSPEC
349 && (XINT (op, 1) == UNSPEC_GOTOFF
350 || XINT (op, 1) == UNSPEC_GOT))
351 return 0;
352 }
353 return symbolic_operand (op, mode);
354 })
355
356
357 ;; Return nonzero if OP is nonmemory operand acceptable by movabs patterns.
358 (define_predicate "x86_64_movabs_operand"
359 (if_then_else (match_test "!TARGET_64BIT || !flag_pic")
360 (match_operand 0 "nonmemory_operand")
361 (ior (match_operand 0 "register_operand")
362 (and (match_operand 0 "const_double_operand")
363 (match_test "GET_MODE_SIZE (mode) <= 8")))))
364
365 ;; Returns nonzero if OP is either a symbol reference or a sum of a symbol
366 ;; reference and a constant.
367 (define_predicate "symbolic_operand"
368 (match_code "symbol_ref,label_ref,const")
369 {
370 switch (GET_CODE (op))
371 {
372 case SYMBOL_REF:
373 case LABEL_REF:
374 return 1;
375
376 case CONST:
377 op = XEXP (op, 0);
378 if (GET_CODE (op) == SYMBOL_REF
379 || GET_CODE (op) == LABEL_REF
380 || (GET_CODE (op) == UNSPEC
381 && (XINT (op, 1) == UNSPEC_GOT
382 || XINT (op, 1) == UNSPEC_GOTOFF
383 || XINT (op, 1) == UNSPEC_GOTPCREL)))
384 return 1;
385 if (GET_CODE (op) != PLUS
386 || !CONST_INT_P (XEXP (op, 1)))
387 return 0;
388
389 op = XEXP (op, 0);
390 if (GET_CODE (op) == SYMBOL_REF
391 || GET_CODE (op) == LABEL_REF)
392 return 1;
393 /* Only @GOTOFF gets offsets. */
394 if (GET_CODE (op) != UNSPEC
395 || XINT (op, 1) != UNSPEC_GOTOFF)
396 return 0;
397
398 op = XVECEXP (op, 0, 0);
399 if (GET_CODE (op) == SYMBOL_REF
400 || GET_CODE (op) == LABEL_REF)
401 return 1;
402 return 0;
403
404 default:
405 gcc_unreachable ();
406 }
407 })
408
409 ;; Return true if the operand contains a @GOT or @GOTOFF reference.
410 (define_predicate "pic_symbolic_operand"
411 (match_code "const")
412 {
413 op = XEXP (op, 0);
414 if (TARGET_64BIT)
415 {
416 if (GET_CODE (op) == UNSPEC
417 && XINT (op, 1) == UNSPEC_GOTPCREL)
418 return 1;
419 if (GET_CODE (op) == PLUS
420 && GET_CODE (XEXP (op, 0)) == UNSPEC
421 && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL)
422 return 1;
423 }
424 else
425 {
426 if (GET_CODE (op) == UNSPEC)
427 return 1;
428 if (GET_CODE (op) != PLUS
429 || !CONST_INT_P (XEXP (op, 1)))
430 return 0;
431 op = XEXP (op, 0);
432 if (GET_CODE (op) == UNSPEC
433 && XINT (op, 1) != UNSPEC_MACHOPIC_OFFSET)
434 return 1;
435 }
436 return 0;
437 })
438
439 ;; Return true if OP is a symbolic operand that resolves locally.
440 (define_predicate "local_symbolic_operand"
441 (match_code "const,label_ref,symbol_ref")
442 {
443 if (GET_CODE (op) == CONST
444 && GET_CODE (XEXP (op, 0)) == PLUS
445 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
446 op = XEXP (XEXP (op, 0), 0);
447
448 if (GET_CODE (op) == LABEL_REF)
449 return 1;
450
451 if (GET_CODE (op) != SYMBOL_REF)
452 return 0;
453
454 if (SYMBOL_REF_TLS_MODEL (op) != 0)
455 return 0;
456
457 if (SYMBOL_REF_LOCAL_P (op))
458 return 1;
459
460 /* There is, however, a not insubstantial body of code in the rest of
461 the compiler that assumes it can just stick the results of
462 ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done. */
463 /* ??? This is a hack. Should update the body of the compiler to
464 always create a DECL and invoke targetm.encode_section_info. */
465 if (strncmp (XSTR (op, 0), internal_label_prefix,
466 internal_label_prefix_len) == 0)
467 return 1;
468
469 return 0;
470 })
471
472 ;; Test for a legitimate @GOTOFF operand.
473 ;;
474 ;; VxWorks does not impose a fixed gap between segments; the run-time
475 ;; gap can be different from the object-file gap. We therefore can't
476 ;; use @GOTOFF unless we are absolutely sure that the symbol is in the
477 ;; same segment as the GOT. Unfortunately, the flexibility of linker
478 ;; scripts means that we can't be sure of that in general, so assume
479 ;; that @GOTOFF is never valid on VxWorks.
480 (define_predicate "gotoff_operand"
481 (and (match_test "!TARGET_VXWORKS_RTP")
482 (match_operand 0 "local_symbolic_operand")))
483
484 ;; Test for various thread-local symbols.
485 (define_predicate "tls_symbolic_operand"
486 (and (match_code "symbol_ref")
487 (match_test "SYMBOL_REF_TLS_MODEL (op) != 0")))
488
489 (define_predicate "tls_modbase_operand"
490 (and (match_code "symbol_ref")
491 (match_test "op == ix86_tls_module_base ()")))
492
493 (define_predicate "tp_or_register_operand"
494 (ior (match_operand 0 "register_operand")
495 (and (match_code "unspec")
496 (match_test "XINT (op, 1) == UNSPEC_TP"))))
497
498 ;; Test for a pc-relative call operand
499 (define_predicate "constant_call_address_operand"
500 (match_code "symbol_ref")
501 {
502 if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
503 return false;
504 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
505 return false;
506 return true;
507 })
508
509 ;; True for any register that is neither virtual nor eliminable. Used in places
510 ;; where eliminating such a register could cause the pattern to no longer be recognized.
511 (define_predicate "register_no_elim_operand"
512 (match_operand 0 "register_operand")
513 {
514 if (GET_CODE (op) == SUBREG)
515 op = SUBREG_REG (op);
516 return !(op == arg_pointer_rtx
517 || op == frame_pointer_rtx
518 || IN_RANGE (REGNO (op),
519 FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
520 })
521
522 ;; Similarly, but also reject the stack pointer. This is used to prevent esp
523 ;; from being used as an index reg.
524 (define_predicate "index_register_operand"
525 (match_operand 0 "register_operand")
526 {
527 if (GET_CODE (op) == SUBREG)
528 op = SUBREG_REG (op);
529 if (reload_in_progress || reload_completed)
530 return REG_OK_FOR_INDEX_STRICT_P (op);
531 else
532 return REG_OK_FOR_INDEX_NONSTRICT_P (op);
533 })
534
535 ;; Return false if this is any eliminable register. Otherwise general_operand.
536 (define_predicate "general_no_elim_operand"
537 (if_then_else (match_code "reg,subreg")
538 (match_operand 0 "register_no_elim_operand")
539 (match_operand 0 "general_operand")))
540
541 ;; Return false if this is any eliminable register. Otherwise
542 ;; register_operand or a constant.
543 (define_predicate "nonmemory_no_elim_operand"
544 (ior (match_operand 0 "register_no_elim_operand")
545 (match_operand 0 "immediate_operand")))
546
547 ;; Test for a valid operand for a call instruction.
548 (define_predicate "call_insn_operand"
549 (ior (match_operand 0 "constant_call_address_operand")
550 (ior (match_operand 0 "register_no_elim_operand")
551 (match_operand 0 "memory_operand"))))
552
553 ;; Similarly, but for tail calls, in which we cannot allow memory references.
554 (define_predicate "sibcall_insn_operand"
555 (ior (match_operand 0 "constant_call_address_operand")
556 (match_operand 0 "register_no_elim_operand")))
557
558 ;; Match exactly zero.
559 (define_predicate "const0_operand"
560 (match_code "const_int,const_double,const_vector")
561 {
562 if (mode == VOIDmode)
563 mode = GET_MODE (op);
564 return op == CONST0_RTX (mode);
565 })
566
567 ;; Match exactly one.
568 (define_predicate "const1_operand"
569 (and (match_code "const_int")
570 (match_test "op == const1_rtx")))
571
572 ;; Match exactly eight.
573 (define_predicate "const8_operand"
574 (and (match_code "const_int")
575 (match_test "INTVAL (op) == 8")))
576
577 ;; Match 2, 4, or 8. Used for leal multiplicands.
578 (define_predicate "const248_operand"
579 (match_code "const_int")
580 {
581 HOST_WIDE_INT i = INTVAL (op);
582 return i == 2 || i == 4 || i == 8;
583 })
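;; For illustration: this is the scale factor of an address such as
;; (plus (reg) (mult (reg) (const_int 4))), which lea can compute directly;
;; a scale of 1 needs no mult, so only 2, 4 and 8 are matched here.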
584
585 ;; Match 0 or 1.
586 (define_predicate "const_0_to_1_operand"
587 (and (match_code "const_int")
588 (match_test "op == const0_rtx || op == const1_rtx")))
589
590 ;; Match 0 to 3.
591 (define_predicate "const_0_to_3_operand"
592 (and (match_code "const_int")
593 (match_test "IN_RANGE (INTVAL (op), 0, 3)")))
594
595 ;; Match 0 to 7.
596 (define_predicate "const_0_to_7_operand"
597 (and (match_code "const_int")
598 (match_test "IN_RANGE (INTVAL (op), 0, 7)")))
599
600 ;; Match 0 to 15.
601 (define_predicate "const_0_to_15_operand"
602 (and (match_code "const_int")
603 (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
604
605 ;; Match 0 to 31.
606 (define_predicate "const_0_to_31_operand"
607 (and (match_code "const_int")
608 (match_test "IN_RANGE (INTVAL (op), 0, 31)")))
609
610 ;; Match 0 to 63.
611 (define_predicate "const_0_to_63_operand"
612 (and (match_code "const_int")
613 (match_test "IN_RANGE (INTVAL (op), 0, 63)")))
614
615 ;; Match 0 to 255.
616 (define_predicate "const_0_to_255_operand"
617 (and (match_code "const_int")
618 (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
619
620 ;; Match (0 to 255) * 8
621 (define_predicate "const_0_to_255_mul_8_operand"
622 (match_code "const_int")
623 {
624 unsigned HOST_WIDE_INT val = INTVAL (op);
625 return val <= 255*8 && val % 8 == 0;
626 })
627
628 ;; Return nonzero if OP is CONST_INT >= 1 and <= 31 (a valid operand
629 ;; for shift & compare patterns, as shifting by 0 does not change flags).
630 (define_predicate "const_1_to_31_operand"
631 (and (match_code "const_int")
632 (match_test "IN_RANGE (INTVAL (op), 1, 31)")))
633
634 ;; Return nonzero if OP is CONST_INT >= 1 and <= 63 (a valid operand
635 ;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
636 (define_predicate "const_1_to_63_operand"
637 (and (match_code "const_int")
638 (match_test "IN_RANGE (INTVAL (op), 1, 63)")))
639
640 ;; Match 2 or 3.
641 (define_predicate "const_2_to_3_operand"
642 (and (match_code "const_int")
643 (match_test "IN_RANGE (INTVAL (op), 2, 3)")))
644
645 ;; Match 4 to 5.
646 (define_predicate "const_4_to_5_operand"
647 (and (match_code "const_int")
648 (match_test "IN_RANGE (INTVAL (op), 4, 5)")))
649
650 ;; Match 4 to 7.
651 (define_predicate "const_4_to_7_operand"
652 (and (match_code "const_int")
653 (match_test "IN_RANGE (INTVAL (op), 4, 7)")))
654
655 ;; Match 6 to 7.
656 (define_predicate "const_6_to_7_operand"
657 (and (match_code "const_int")
658 (match_test "IN_RANGE (INTVAL (op), 6, 7)")))
659
660 ;; Match 8 to 11.
661 (define_predicate "const_8_to_11_operand"
662 (and (match_code "const_int")
663 (match_test "IN_RANGE (INTVAL (op), 8, 11)")))
664
665 ;; Match 12 to 15.
666 (define_predicate "const_12_to_15_operand"
667 (and (match_code "const_int")
668 (match_test "IN_RANGE (INTVAL (op), 12, 15)")))
669
670 ;; Match exactly one bit in 2-bit mask.
671 (define_predicate "const_pow2_1_to_2_operand"
672 (and (match_code "const_int")
673 (match_test "INTVAL (op) == 1 || INTVAL (op) == 2")))
674
675 ;; Match exactly one bit in 4-bit mask.
676 (define_predicate "const_pow2_1_to_8_operand"
677 (match_code "const_int")
678 {
679 unsigned int log = exact_log2 (INTVAL (op));
680 return log <= 3;
681 })
682
683 ;; Match exactly one bit in 8-bit mask.
684 (define_predicate "const_pow2_1_to_128_operand"
685 (match_code "const_int")
686 {
687 unsigned int log = exact_log2 (INTVAL (op));
688 return log <= 7;
689 })
690
691 ;; Match exactly one bit in 16-bit mask.
692 (define_predicate "const_pow2_1_to_32768_operand"
693 (match_code "const_int")
694 {
695 unsigned int log = exact_log2 (INTVAL (op));
696 return log <= 15;
697 })
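;; Note on the three predicates above: exact_log2 returns -1 for zero and for
;; values that are not a power of two; stored into an unsigned int that
;; becomes a very large number, so the "log <= N" tests reject those values
;; as well.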
698
699 ;; True if this is a constant appropriate for an increment or decrement.
700 (define_predicate "incdec_operand"
701 (match_code "const_int")
702 {
703 /* On Pentium 4, the inc and dec operations cause an extra dependency on the
704 flags register, since the carry flag is not set. */
705 if (!TARGET_USE_INCDEC && !optimize_size)
706 return 0;
707 return op == const1_rtx || op == constm1_rtx;
708 })
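;; For illustration: only (const_int 1) and (const_int -1) match, so add/sub
;; patterns can emit inc or dec for those values; when TARGET_USE_INCDEC is
;; off (and we are not optimizing for size) the predicate fails and a plain
;; add/sub with an immediate is used instead.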
709
710 ;; True for registers, or 1 or -1. Used to optimize double-word shifts.
711 (define_predicate "reg_or_pm1_operand"
712 (ior (match_operand 0 "register_operand")
713 (and (match_code "const_int")
714 (match_test "op == const1_rtx || op == constm1_rtx"))))
715
716 ;; True if OP is acceptable as operand of DImode shift expander.
717 (define_predicate "shiftdi_operand"
718 (if_then_else (match_test "TARGET_64BIT")
719 (match_operand 0 "nonimmediate_operand")
720 (match_operand 0 "register_operand")))
721
722 (define_predicate "ashldi_input_operand"
723 (if_then_else (match_test "TARGET_64BIT")
724 (match_operand 0 "nonimmediate_operand")
725 (match_operand 0 "reg_or_pm1_operand")))
726
727 ;; Return true if OP is a vector load from the constant pool with just
728 ;; the first element nonzero.
729 (define_predicate "zero_extended_scalar_load_operand"
730 (match_code "mem")
731 {
732 unsigned n_elts;
733 op = maybe_get_pool_constant (op);
734 if (!op)
735 return 0;
736 if (GET_CODE (op) != CONST_VECTOR)
737 return 0;
738 n_elts =
739 (GET_MODE_SIZE (GET_MODE (op)) /
740 GET_MODE_SIZE (GET_MODE_INNER (GET_MODE (op))));
741 for (n_elts--; n_elts > 0; n_elts--)
742 {
743 rtx elt = CONST_VECTOR_ELT (op, n_elts);
744 if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
745 return 0;
746 }
747 return 1;
748 })
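;; For illustration: a constant-pool V4SF vector such as {2.5, 0.0, 0.0, 0.0}
;; matches, so a pattern can load just the first element (e.g. with movss,
;; whose memory form clears the upper elements) instead of the whole vector.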
749
750 ;; Return true if the operand is a vector constant that is all ones.
751 (define_predicate "vector_all_ones_operand"
752 (match_code "const_vector")
753 {
754 int nunits = GET_MODE_NUNITS (mode);
755
756 if (GET_CODE (op) == CONST_VECTOR
757 && CONST_VECTOR_NUNITS (op) == nunits)
758 {
759 int i;
760 for (i = 0; i < nunits; ++i)
761 {
762 rtx x = CONST_VECTOR_ELT (op, i);
763 if (x != constm1_rtx)
764 return 0;
765 }
766 return 1;
767 }
768
769 return 0;
770 })
771
772 ;; Return 1 when OP is an operand acceptable for a standard SSE move.
773 (define_predicate "vector_move_operand"
774 (ior (match_operand 0 "nonimmediate_operand")
775 (match_operand 0 "const0_operand")))
776
777 ;; Return 1 when OP is a nonimmediate operand or a standard SSE constant.
778 (define_predicate "nonimmediate_or_sse_const_operand"
779 (match_operand 0 "general_operand")
780 {
781 if (nonimmediate_operand (op, mode))
782 return 1;
783 if (standard_sse_constant_p (op) > 0)
784 return 1;
785 return 0;
786 })
787
788 ;; Return true if OP is a register or a zero.
789 (define_predicate "reg_or_0_operand"
790 (ior (match_operand 0 "register_operand")
791 (match_operand 0 "const0_operand")))
792
793 ;; Return true if op is a valid address that does not contain
794 ;; a segment override.
795 (define_special_predicate "no_seg_address_operand"
796 (match_operand 0 "address_operand")
797 {
798 struct ix86_address parts;
799 int ok;
800
801 ok = ix86_decompose_address (op, &parts);
802 gcc_assert (ok);
803 return parts.seg == SEG_DEFAULT;
804 })
805
806 ;; Return nonzero if the rtx is known to be at least 32 bits aligned.
807 (define_predicate "aligned_operand"
808 (match_operand 0 "general_operand")
809 {
810 struct ix86_address parts;
811 int ok;
812
813 /* Registers and immediate operands are always "aligned". */
814 if (GET_CODE (op) != MEM)
815 return 1;
816
817 /* All patterns using aligned_operand on memory operands end up
818 promoting the memory operand to 64 bits, causing a memory mismatch stall. */
819 if (TARGET_MEMORY_MISMATCH_STALL && !optimize_size)
820 return 0;
821
822 /* Don't even try to do any aligned optimizations with volatiles. */
823 if (MEM_VOLATILE_P (op))
824 return 0;
825
826 if (MEM_ALIGN (op) >= 32)
827 return 1;
828
829 op = XEXP (op, 0);
830
831 /* Pushes and pops are only valid on the stack pointer. */
832 if (GET_CODE (op) == PRE_DEC
833 || GET_CODE (op) == POST_INC)
834 return 1;
835
836 /* Decode the address. */
837 ok = ix86_decompose_address (op, &parts);
838 gcc_assert (ok);
839
840 /* Look for some component that isn't known to be aligned. */
841 if (parts.index)
842 {
843 if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
844 return 0;
845 }
846 if (parts.base)
847 {
848 if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
849 return 0;
850 }
851 if (parts.disp)
852 {
853 if (!CONST_INT_P (parts.disp)
854 || (INTVAL (parts.disp) & 3) != 0)
855 return 0;
856 }
857
858 /* Didn't find one -- this must be an aligned address. */
859 return 1;
860 })
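;; For illustration: (mem:SI (reg)) is accepted when the base register is
;; known to be at least 32-bit aligned, while an address whose displacement
;; is not a multiple of 4, e.g. (plus (reg) (const_int 2)), is rejected.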
861
862 ;; Returns 1 if OP is memory operand with a displacement.
863 (define_predicate "memory_displacement_operand"
864 (match_operand 0 "memory_operand")
865 {
866 struct ix86_address parts;
867 int ok;
868
869 ok = ix86_decompose_address (XEXP (op, 0), &parts);
870 gcc_assert (ok);
871 return parts.disp != NULL_RTX;
872 })
873
874 ;; Returns 1 if OP is memory operand with a displacement only.
875 (define_predicate "memory_displacement_only_operand"
876 (match_operand 0 "memory_operand")
877 {
878 struct ix86_address parts;
879 int ok;
880
881 ok = ix86_decompose_address (XEXP (op, 0), &parts);
882 gcc_assert (ok);
883
884 if (parts.base || parts.index)
885 return 0;
886
887 return parts.disp != NULL_RTX;
888 })
889
890 ;; Returns 1 if OP is a memory operand that needs at most one register,
891 ;; not counting the stack pointer or frame pointer.
892 (define_predicate "cmpxchg8b_pic_memory_operand"
893 (match_operand 0 "memory_operand")
894 {
895 struct ix86_address parts;
896 int ok;
897
898 ok = ix86_decompose_address (XEXP (op, 0), &parts);
899 gcc_assert (ok);
900 if (parts.base == NULL_RTX
901 || parts.base == arg_pointer_rtx
902 || parts.base == frame_pointer_rtx
903 || parts.base == hard_frame_pointer_rtx
904 || parts.base == stack_pointer_rtx)
905 return 1;
906
907 if (parts.index == NULL_RTX
908 || parts.index == arg_pointer_rtx
909 || parts.index == frame_pointer_rtx
910 || parts.index == hard_frame_pointer_rtx
911 || parts.index == stack_pointer_rtx)
912 return 1;
913
914 return 0;
915 })
916
917
918 ;; Returns 1 if OP is memory operand that cannot be represented
919 ;; by the modRM array.
920 (define_predicate "long_memory_operand"
921 (and (match_operand 0 "memory_operand")
922 (match_test "memory_address_length (op) != 0")))
923
924 ;; Return 1 if OP is a comparison operator that can be issued by fcmov.
925 (define_predicate "fcmov_comparison_operator"
926 (match_operand 0 "comparison_operator")
927 {
928 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
929 enum rtx_code code = GET_CODE (op);
930
931 if (inmode == CCFPmode || inmode == CCFPUmode)
932 {
933 enum rtx_code second_code, bypass_code;
934 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
935 if (bypass_code != UNKNOWN || second_code != UNKNOWN)
936 return 0;
937 code = ix86_fp_compare_code_to_integer (code);
938 }
939 /* The i387 supports only a limited set of condition codes. */
940 switch (code)
941 {
942 case LTU: case GTU: case LEU: case GEU:
943 if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
944 || inmode == CCCmode)
945 return 1;
946 return 0;
947 case ORDERED: case UNORDERED:
948 case EQ: case NE:
949 return 1;
950 default:
951 return 0;
952 }
953 })
954
955 ;; Return 1 if OP is a comparison that can be used in the CMPSS/CMPPS insns.
956 ;; The first set are supported directly; the second set can't be done with
957 ;; full IEEE support, i.e. NaNs.
958 ;;
959 ;; ??? It would seem that we have a lot of uses of this predicate that pass
960 ;; it the wrong mode. We got away with this because the old function didn't
961 ;; check the mode at all. Mirror that for now by calling this a special
962 ;; predicate.
963
964 (define_special_predicate "sse_comparison_operator"
965 (match_code "eq,lt,le,unordered,ne,unge,ungt,ordered"))
966
967 ;; Return 1 if OP is a comparison operator that can be issued by
968 ;; AVX predicate generation instructions.
969 (define_predicate "avx_comparison_float_operator"
970 (match_code "ne,eq,ge,gt,le,lt,unordered,ordered,uneq,unge,ungt,unle,unlt,ltgt"))
971
972 ;; Return 1 if OP is a comparison operator that can be issued by SSE5 predicate
973 ;; generation instructions.
974 (define_predicate "sse5_comparison_float_operator"
975 (and (match_test "TARGET_SSE5")
976 (match_code "ne,eq,ge,gt,le,lt,unordered,ordered,uneq,unge,ungt,unle,unlt,ltgt")))
977
978 (define_predicate "ix86_comparison_int_operator"
979 (match_code "ne,eq,ge,gt,le,lt"))
980
981 (define_predicate "ix86_comparison_uns_operator"
982 (match_code "ne,eq,geu,gtu,leu,ltu"))
983
984 (define_predicate "bt_comparison_operator"
985 (match_code "ne,eq"))
986
987 ;; Return 1 if OP is a valid comparison operator in valid mode.
988 (define_predicate "ix86_comparison_operator"
989 (match_operand 0 "comparison_operator")
990 {
991 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
992 enum rtx_code code = GET_CODE (op);
993
994 if (inmode == CCFPmode || inmode == CCFPUmode)
995 {
996 enum rtx_code second_code, bypass_code;
997 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
998 return (bypass_code == UNKNOWN && second_code == UNKNOWN);
999 }
1000 switch (code)
1001 {
1002 case EQ: case NE:
1003 return 1;
1004 case LT: case GE:
1005 if (inmode == CCmode || inmode == CCGCmode
1006 || inmode == CCGOCmode || inmode == CCNOmode)
1007 return 1;
1008 return 0;
1009 case LTU: case GTU: case LEU: case GEU:
1010 if (inmode == CCmode || inmode == CCCmode)
1011 return 1;
1012 return 0;
1013 case ORDERED: case UNORDERED:
1014 if (inmode == CCmode)
1015 return 1;
1016 return 0;
1017 case GT: case LE:
1018 if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
1019 return 1;
1020 return 0;
1021 default:
1022 return 0;
1023 }
1024 })
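;; For illustration: an unsigned comparison such as
;; (gtu (reg:CC FLAGS_REG) (const_int 0)) is accepted only when the flags were
;; produced in CCmode or CCCmode; the other flags modes (e.g. CCGOCmode) do
;; not represent the carry flag reliably, so unsigned conditions are rejected
;; there.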
1025
1026 ;; Return 1 if OP is a valid comparison operator that tests whether the carry flag is set.
1027 (define_predicate "ix86_carry_flag_operator"
1028 (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
1029 {
1030 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
1031 enum rtx_code code = GET_CODE (op);
1032
1033 if (!REG_P (XEXP (op, 0))
1034 || REGNO (XEXP (op, 0)) != FLAGS_REG
1035 || XEXP (op, 1) != const0_rtx)
1036 return 0;
1037
1038 if (inmode == CCFPmode || inmode == CCFPUmode)
1039 {
1040 enum rtx_code second_code, bypass_code;
1041 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
1042 if (bypass_code != UNKNOWN || second_code != UNKNOWN)
1043 return 0;
1044 code = ix86_fp_compare_code_to_integer (code);
1045 }
1046 else if (inmode == CCCmode)
1047 return code == LTU || code == GTU;
1048 else if (inmode != CCmode)
1049 return 0;
1050
1051 return code == LTU;
1052 })
1053
1054 ;; Nearly general operand, but accept any const_double, since we wish
1055 ;; to be able to drop them into memory rather than have them get pulled
1056 ;; into registers.
1057 (define_predicate "cmp_fp_expander_operand"
1058 (ior (match_code "const_double")
1059 (match_operand 0 "general_operand")))
1060
1061 ;; Return true if this is a valid binary floating-point operation.
1062 (define_predicate "binary_fp_operator"
1063 (match_code "plus,minus,mult,div"))
1064
1065 ;; Return true if this is a multiply operation.
1066 (define_predicate "mult_operator"
1067 (match_code "mult"))
1068
1069 ;; Return true if this is a division operation.
1070 (define_predicate "div_operator"
1071 (match_code "div"))
1072
1073 ;; Return true if this is a float extend operation.
1074 (define_predicate "float_operator"
1075 (match_code "float"))
1076
1077 ;; Return true for ARITHMETIC_P.
1078 (define_predicate "arith_or_logical_operator"
1079 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
1080 mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
1081
1082 ;; Return true for COMMUTATIVE_P.
1083 (define_predicate "commutative_operator"
1084 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))
1085
1086 ;; Return 1 if OP is a binary operator that can be promoted to wider mode.
1087 (define_predicate "promotable_binary_operator"
1088 (ior (match_code "plus,and,ior,xor,ashift")
1089 (and (match_code "mult")
1090 (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
1091
1092 ;; To avoid problems when jump re-emits comparisons like testqi_ext_ccno_0,
1093 ;; re-recognize the operand to avoid a copy_to_mode_reg that will fail.
1094 ;;
1095 ;; ??? It seems likely that this will only work because cmpsi is an
1096 ;; expander, and no actual insns use this.
1097
1098 (define_predicate "cmpsi_operand"
1099 (ior (match_operand 0 "nonimmediate_operand")
1100 (and (match_code "and")
1101 (match_code "zero_extract" "0")
1102 (match_code "const_int" "1")
1103 (match_code "const_int" "01")
1104 (match_code "const_int" "02")
1105 (match_test "INTVAL (XEXP (XEXP (op, 0), 1)) == 8")
1106 (match_test "INTVAL (XEXP (XEXP (op, 0), 2)) == 8")
1107 )))
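;; The nested match_code strings above address sub-expressions: "0" is the
;; first operand of the AND, and "01"/"02" are the size and position operands
;; of that zero_extract. The accepted form is therefore
;; (and (zero_extract X (const_int 8) (const_int 8)) (const_int N)),
;; i.e. a test of the 8-bit field at bit 8 of X (the %ah-style byte).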
1108
1109 (define_predicate "compare_operator"
1110 (match_code "compare"))
1111
1112 (define_predicate "absneg_operator"
1113 (match_code "abs,neg"))
1114
1115 ;; Return 1 if OP is a misaligned memory operand.
1116 (define_predicate "misaligned_operand"
1117 (and (match_code "mem")
1118 (match_test "MEM_ALIGN (op) < GET_MODE_ALIGNMENT (mode)")))
1119
1120 ;; Return 1 if OP is a vzeroall operation, known to be a PARALLEL.
1121 (define_predicate "vzeroall_operation"
1122 (match_code "parallel")
1123 {
1124 int nregs = TARGET_64BIT ? 16 : 8;
1125
1126 if (XVECLEN (op, 0) != nregs + 1)
1127 return 0;
1128
1129 return 1;
1130 })
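;; For illustration: the vzeroall pattern is a PARALLEL holding one
;; unspec_volatile plus one set per SSE register that is cleared, hence the
;; expected length of nregs + 1 (16 registers in 64-bit mode, 8 otherwise);
;; only the element count is checked here, the insn pattern matches the rest.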