comparison gcc/config/i386/predicates.md @ 131:84e7813d76e9

gcc-8.2
author mir3636
date Thu, 25 Oct 2018 07:37:49 +0900
parents 04ced10e8804
children 1830386684a0
--- gcc/config/i386/predicates.md	(111:04ced10e8804)
+++ gcc/config/i386/predicates.md	(131:84e7813d76e9)
@@ -1,7 +1,7 @@
 ;; Predicate definitions for IA-32 and x86-64.
-;; Copyright (C) 2004-2017 Free Software Foundation, Inc.
+;; Copyright (C) 2004-2018 Free Software Foundation, Inc.
 ;;
 ;; This file is part of GCC.
 ;;
 ;; GCC is free software; you can redistribute it and/or modify
 ;; it under the terms of the GNU General Public License as published by
@@ -598,11 +598,12 @@
 
 ;; Test for a pc-relative call operand
 (define_predicate "constant_call_address_operand"
   (match_code "symbol_ref")
 {
-  if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
+  if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC
+      || flag_force_indirect_call)
     return false;
   if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
     return false;
   return true;
 })
@@ -662,11 +663,12 @@
        (match_operand 0 "immediate_operand")))
 
 ;; Test for a valid operand for indirect branch.
 (define_predicate "indirect_branch_operand"
   (ior (match_operand 0 "register_operand")
-       (and (not (match_test "TARGET_X32"))
+       (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
+            (not (match_test "TARGET_X32"))
             (match_operand 0 "memory_operand"))))
 
 ;; Return true if OP is a memory operands that can be used in sibcalls.
 ;; Since sibcall never returns, we can only use call-clobbered register
 ;; as GOT base.  Allow GOT slot here only with pseudo register as GOT
@@ -705,24 +707,26 @@
 ;; Allow constant call address operands in Pmode only.
 (define_special_predicate "call_insn_operand"
   (ior (match_test "constant_call_address_operand
                      (op, mode == VOIDmode ? mode : Pmode)")
        (match_operand 0 "call_register_no_elim_operand")
-       (ior (and (not (match_test "TARGET_X32"))
-                 (match_operand 0 "memory_operand"))
-            (and (match_test "TARGET_X32 && Pmode == DImode")
-                 (match_operand 0 "GOT_memory_operand")))))
+       (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
+            (ior (and (not (match_test "TARGET_X32"))
+                      (match_operand 0 "memory_operand"))
+                 (and (match_test "TARGET_X32 && Pmode == DImode")
+                      (match_operand 0 "GOT_memory_operand"))))))
 
 ;; Similarly, but for tail calls, in which we cannot allow memory references.
 (define_special_predicate "sibcall_insn_operand"
   (ior (match_test "constant_call_address_operand
                      (op, mode == VOIDmode ? mode : Pmode)")
        (match_operand 0 "register_no_elim_operand")
-       (ior (and (not (match_test "TARGET_X32"))
-                 (match_operand 0 "sibcall_memory_operand"))
-            (and (match_test "TARGET_X32 && Pmode == DImode")
-                 (match_operand 0 "GOT_memory_operand")))))
+       (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
+            (ior (and (not (match_test "TARGET_X32"))
+                      (match_operand 0 "sibcall_memory_operand"))
+                 (and (match_test "TARGET_X32 && Pmode == DImode")
+                      (match_operand 0 "GOT_memory_operand"))))))
 
 ;; Return true if OP is a 32-bit GOT symbol operand.
 (define_predicate "GOT32_symbol_operand"
   (match_test "GET_CODE (op) == CONST
                && GET_CODE (XEXP (op, 0)) == UNSPEC
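Note on the two hunks above: the new TARGET_INDIRECT_BRANCH_REGISTER test is placed so that it guards both memory alternatives at once. Spelled out as ordinary C, the reworked call_insn_operand accepts an operand roughly under the condition sketched below; this is a hand-written restatement for readability, not the code genpreds actually emits, and the function name is invented.

/* Hand-written restatement of the reworked call_insn_operand template
   (illustrative sketch only; the real predicate function is generated
   by genpreds from the RTL template above).  */
static bool
call_insn_operand_sketch (rtx op, machine_mode mode)
{
  return constant_call_address_operand (op, mode == VOIDmode ? mode : Pmode)
         || call_register_no_elim_operand (op, mode)
         /* Both memory alternatives are now rejected whenever indirect
            branches are required to go through a register.  */
         || (!TARGET_INDIRECT_BRANCH_REGISTER
             && ((!TARGET_X32 && memory_operand (op, mode))
                 || (TARGET_X32 && Pmode == DImode
                     && GOT_memory_operand (op, mode))));
}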
@@ -1002,13 +1006,13 @@
 ;; the first element nonzero.
 (define_predicate "zero_extended_scalar_load_operand"
   (match_code "mem")
 {
   unsigned n_elts;
-  op = maybe_get_pool_constant (op);
+  op = avoid_constant_pool_reference (op);
 
-  if (!(op && GET_CODE (op) == CONST_VECTOR))
+  if (GET_CODE (op) != CONST_VECTOR)
     return false;
 
   n_elts = CONST_VECTOR_NUNITS (op);
 
   for (n_elts--; n_elts > 0; n_elts--)
@@ -1036,15 +1040,10 @@
 ; Return true when OP is register_operand or vector_memory_operand.
 (define_predicate "vector_operand"
   (ior (match_operand 0 "register_operand")
        (match_operand 0 "vector_memory_operand")))
 
-; Return true when OP is operand acceptable for standard SSE move.
-(define_predicate "vector_move_operand"
-  (ior (match_operand 0 "nonimmediate_operand")
-       (match_operand 0 "const0_operand")))
-
 ;; Return true when OP is either nonimmediate operand, or any
 ;; CONST_VECTOR.
 (define_predicate "nonimmediate_or_const_vector_operand"
   (ior (match_operand 0 "nonimmediate_operand")
        (match_code "const_vector")))
@@ -1055,10 +1054,15 @@
        (match_test "standard_sse_constant_p (op, mode)")))
 
 ;; Return true if OP is a register or a zero.
 (define_predicate "reg_or_0_operand"
   (ior (match_operand 0 "register_operand")
+       (match_operand 0 "const0_operand")))
+
+; Return true when OP is a nonimmediate or zero.
+(define_predicate "nonimm_or_0_operand"
+  (ior (match_operand 0 "nonimmediate_operand")
        (match_operand 0 "const0_operand")))
 
 (define_predicate "norex_memory_operand"
   (and (match_operand 0 "memory_operand")
        (not (match_test "x86_extended_reg_mentioned_p (op)"))))
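The nonimm_or_0_operand predicate introduced above has the same body as the vector_move_operand predicate deleted a few hunks earlier (nonimmediate operand or constant zero), so the change reads as a rename; use sites elsewhere in the changeset presumably switch to the new name. A hypothetical use site might look like the sketch below; the pattern name, mode and condition are invented for illustration and are not taken from this patch.

;; Hypothetical consumer of the renamed predicate (illustration only):
;; a move expander whose source may be a register, a memory reference,
;; or the all-zeros constant.
(define_expand "example_movv4sf_or_zero"
  [(set (match_operand:V4SF 0 "register_operand")
        (match_operand:V4SF 1 "nonimm_or_0_operand"))]
  "TARGET_SSE"
  "")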
@@ -1126,76 +1130,11 @@
     }
 
   return true;
 })
 
-;; Return true if op is valid MPX address operand without base
-(define_predicate "address_mpx_no_base_operand"
-  (match_test "address_operand (op, VOIDmode)")
-{
-  struct ix86_address parts;
-  int ok;
-
-  ok = ix86_decompose_address (op, &parts);
-  gcc_assert (ok);
-
-  if (parts.index && parts.base)
-    return false;
-
-  if (parts.seg != ADDR_SPACE_GENERIC)
-    return false;
-
-  /* Do not support (%rip).  */
-  if (parts.disp && flag_pic && TARGET_64BIT
-      && SYMBOLIC_CONST (parts.disp))
-    {
-      if (GET_CODE (parts.disp) != CONST
-          || GET_CODE (XEXP (parts.disp, 0)) != PLUS
-          || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
-          || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
-          || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
-              && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF))
-        return false;
-    }
-
-  return true;
-})
-
-;; Return true if op is valid MPX address operand without index
-(define_predicate "address_mpx_no_index_operand"
-  (match_test "address_operand (op, VOIDmode)")
-{
-  struct ix86_address parts;
-  int ok;
-
-  ok = ix86_decompose_address (op, &parts);
-  gcc_assert (ok);
-
-  if (parts.index)
-    return false;
-
-  if (parts.seg != ADDR_SPACE_GENERIC)
-    return false;
-
-  /* Do not support (%rip).  */
-  if (parts.disp && flag_pic && TARGET_64BIT
-      && SYMBOLIC_CONST (parts.disp)
-      && (GET_CODE (parts.disp) != CONST
-          || GET_CODE (XEXP (parts.disp, 0)) != PLUS
-          || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
-          || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
-          || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
-              && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF)))
-    return false;
-
-  return true;
-})
-
 (define_predicate "vsib_mem_operator"
-  (match_code "mem"))
-
-(define_predicate "bnd_mem_operator"
   (match_code "mem"))
 
 ;; Return true if the rtx is known to be at least 32 bits aligned.
 (define_predicate "aligned_operand"
   (match_operand 0 "general_operand")
@@ -1299,22 +1238,21 @@
   (match_operand 0 "comparison_operator")
 {
   machine_mode inmode = GET_MODE (XEXP (op, 0));
   enum rtx_code code = GET_CODE (op);
 
-  if (inmode == CCFPmode || inmode == CCFPUmode)
+  if (inmode == CCFPmode)
     {
       if (!ix86_trivial_fp_comparison_operator (op, mode))
         return false;
       code = ix86_fp_compare_code_to_integer (code);
     }
   /* i387 supports just limited amount of conditional codes.  */
   switch (code)
     {
     case LTU: case GTU: case LEU: case GEU:
-      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
-          || inmode == CCCmode)
+      if (inmode == CCmode || inmode == CCFPmode || inmode == CCCmode)
         return true;
       return false;
     case ORDERED: case UNORDERED:
     case EQ: case NE:
       return true;
@@ -1346,11 +1284,11 @@
   (match_operand 0 "comparison_operator")
 {
   machine_mode inmode = GET_MODE (XEXP (op, 0));
   enum rtx_code code = GET_CODE (op);
 
-  if (inmode == CCFPmode || inmode == CCFPUmode)
+  if (inmode == CCFPmode)
     return ix86_trivial_fp_comparison_operator (op, mode);
 
   switch (code)
     {
     case EQ: case NE:
@@ -1389,11 +1327,11 @@
   (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
 {
   machine_mode inmode = GET_MODE (XEXP (op, 0));
   enum rtx_code code = GET_CODE (op);
 
-  if (inmode == CCFPmode || inmode == CCFPUmode)
+  if (inmode == CCFPmode)
     {
       if (!ix86_trivial_fp_comparison_operator (op, mode))
         return false;
       code = ix86_fp_compare_code_to_integer (code);
     }
@@ -1466,40 +1404,10 @@
 ;; less than its natural alignment.
 (define_predicate "misaligned_operand"
   (and (match_code "mem")
        (match_test "MEM_ALIGN (op) < GET_MODE_BITSIZE (mode)")))
 
-;; Return true if OP is a emms operation, known to be a PARALLEL.
-(define_predicate "emms_operation"
-  (match_code "parallel")
-{
-  unsigned i;
-
-  if (XVECLEN (op, 0) != 17)
-    return false;
-
-  for (i = 0; i < 8; i++)
-    {
-      rtx elt = XVECEXP (op, 0, i+1);
-
-      if (GET_CODE (elt) != CLOBBER
-          || GET_CODE (SET_DEST (elt)) != REG
-          || GET_MODE (SET_DEST (elt)) != XFmode
-          || REGNO (SET_DEST (elt)) != FIRST_STACK_REG + i)
-        return false;
-
-      elt = XVECEXP (op, 0, i+9);
-
-      if (GET_CODE (elt) != CLOBBER
-          || GET_CODE (SET_DEST (elt)) != REG
-          || GET_MODE (SET_DEST (elt)) != DImode
-          || REGNO (SET_DEST (elt)) != FIRST_MMX_REG + i)
-        return false;
-    }
-  return true;
-})
-
 ;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
 (define_predicate "vzeroall_operation"
   (match_code "parallel")
 {
   unsigned i, nregs = TARGET_64BIT ? 16 : 8;
@@ -1512,19 +1420,25 @@
       rtx elt = XVECEXP (op, 0, i+1);
 
       if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != V8SImode
-         || REGNO (SET_DEST (elt)) != SSE_REGNO (i)
+         || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
          || SET_SRC (elt) != CONST0_RTX (V8SImode))
        return false;
     }
   return true;
 })
 
-;; return true if OP is a vzeroupper operation.
-(define_predicate "vzeroupper_operation"
+;; return true if OP is a vzeroall pattern.
+(define_predicate "vzeroall_pattern"
+  (and (match_code "parallel")
+       (match_code "unspec_volatile" "a")
+       (match_test "XINT (XVECEXP (op, 0, 0), 1) == UNSPECV_VZEROALL")))
+
+;; return true if OP is a vzeroupper pattern.
+(define_predicate "vzeroupper_pattern"
   (and (match_code "unspec_volatile")
        (match_test "XINT (op, 1) == UNSPECV_VZEROUPPER")))
 
 ;; Return true if OP is an addsub vec_merge operation
 (define_predicate "addsub_vm_operator"
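In the new vzeroall_pattern predicate, the second argument to match_code ("a") appears to direct the unspec_volatile code test at the first element of the PARALLEL, XVECEXP (op, 0, 0), which is also the sub-expression the match_test line inspects. The RTL it is meant to recognise therefore has roughly the shape sketched below; this is a hand-written illustration, not compiler dump output, and the register number and element count are placeholders.

;; Rough shape of the RTL recognised by vzeroall_pattern (hand-written
;; illustration; the register and the number of SETs are placeholders):
;;   (parallel [(unspec_volatile [(const_int 0)] UNSPECV_VZEROALL)
;;              (set (reg:V8SI 20) (const_vector:V8SI [all zeros]))
;;              ...])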