comparison gcc/combine.c @ 55:77e2b8dfacca gcc-4.4.5
update it from 4.4.3 to 4.5.0
author   ryoma <e075725@ie.u-ryukyu.ac.jp>
date     Fri, 12 Feb 2010 23:39:51 +0900
parents  3bfb6c00c1e0
children b7f97abdc517
52:c156f1bd5cd9 (old) | 55:77e2b8dfacca (new) |
---|---|
319 | 319 |
320 /* Incremented for each basic block. */ | 320 /* Incremented for each basic block. */ |
321 | 321 |
322 static int label_tick; | 322 static int label_tick; |
323 | 323 |
324 /* Reset to label_tick for each label. */ | 324 /* Reset to label_tick for each extended basic block in scanning order. */ |
325 | 325 |
326 static int label_tick_ebb_start; | 326 static int label_tick_ebb_start; |
327 | 327 |
328 /* Mode used to compute significance in reg_stat[].nonzero_bits. It is the | 328 /* Mode used to compute significance in reg_stat[].nonzero_bits. It is the |
329 largest integer mode that can fit in HOST_BITS_PER_WIDE_INT. */ | 329 largest integer mode that can fit in HOST_BITS_PER_WIDE_INT. */ |
340 | 340 |
341 | 341 |
342 /* Record one modification to rtl structure | 342 /* Record one modification to rtl structure |
343 to be undone by storing old_contents into *where. */ | 343 to be undone by storing old_contents into *where. */ |
344 | 344 |
345 enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE }; | |
346 | |
345 struct undo | 347 struct undo |
346 { | 348 { |
347 struct undo *next; | 349 struct undo *next; |
348 enum { UNDO_RTX, UNDO_INT, UNDO_MODE } kind; | 350 enum undo_kind kind; |
349 union { rtx r; int i; enum machine_mode m; } old_contents; | 351 union { rtx r; int i; enum machine_mode m; } old_contents; |
350 union { rtx *r; int *i; } where; | 352 union { rtx *r; int *i; } where; |
351 }; | 353 }; |
352 | 354 |
353 /* Record a bunch of changes to be undone, up to MAX_UNDO of them. | 355 /* Record a bunch of changes to be undone, up to MAX_UNDO of them. |
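The first substantive hunk above hoists the anonymous kind enum out of `struct undo` into a named file-scope `enum undo_kind`. The debug-insn support added further down in this diff walks `undobuf.undos` and tests `undo->kind == UNDO_MODE`, and a named type is the more convenient handle for that style of code. A standalone sketch of the difference, with hypothetical names:

```c
/* Standalone sketch (hypothetical names, not from combine.c): with a
   file-scope name, code outside the struct declaration can mention the
   type; the anonymous in-struct enum had no such handle.  */
#include <stdio.h>

enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE };

struct undo_sketch
{
  struct undo_sketch *next;
  enum undo_kind kind;
};

static const char *
kind_name (enum undo_kind kind)
{
  switch (kind)
    {
    case UNDO_RTX:  return "rtx";
    case UNDO_INT:  return "int";
    case UNDO_MODE: return "mode";
    }
  return "?";
}

int
main (void)
{
  struct undo_sketch u = { NULL, UNDO_MODE };
  printf ("%s\n", kind_name (u.kind));
  return 0;
}
```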
603 and last insn referencing DEST. */ | 605 and last insn referencing DEST. */ |
604 | 606 |
605 static rtx * | 607 static rtx * |
606 find_single_use (rtx dest, rtx insn, rtx *ploc) | 608 find_single_use (rtx dest, rtx insn, rtx *ploc) |
607 { | 609 { |
610 basic_block bb; | |
608 rtx next; | 611 rtx next; |
609 rtx *result; | 612 rtx *result; |
610 rtx link; | 613 rtx link; |
611 | 614 |
612 #ifdef HAVE_cc0 | 615 #ifdef HAVE_cc0 |
625 #endif | 628 #endif |
626 | 629 |
627 if (!REG_P (dest)) | 630 if (!REG_P (dest)) |
628 return 0; | 631 return 0; |
629 | 632 |
630 for (next = next_nonnote_insn (insn); | 633 bb = BLOCK_FOR_INSN (insn); |
631 next != 0 && !LABEL_P (next); | 634 for (next = NEXT_INSN (insn); |
632 next = next_nonnote_insn (next)) | 635 next && BLOCK_FOR_INSN (next) == bb; |
636 next = NEXT_INSN (next)) | |
633 if (INSN_P (next) && dead_or_set_p (next, dest)) | 637 if (INSN_P (next) && dead_or_set_p (next, dest)) |
634 { | 638 { |
635 for (link = LOG_LINKS (next); link; link = XEXP (link, 1)) | 639 for (link = LOG_LINKS (next); link; link = XEXP (link, 1)) |
636 if (XEXP (link, 0) == insn) | 640 if (XEXP (link, 0) == insn) |
637 break; | 641 break; |
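The rewritten loop in `find_single_use` above stops at basic-block boundaries via `BLOCK_FOR_INSN` instead of at the next `CODE_LABEL`, and steps with plain `NEXT_INSN` rather than `next_nonnote_insn`. The bounding idiom, pulled out as a sketch (GCC-internal types, hypothetical helper name, not standalone):

```c
/* Yield the next insn only while it stays in INSN's basic block.  */
static rtx
next_insn_same_bb (rtx insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);
  rtx next = NEXT_INSN (insn);
  return (next && BLOCK_FOR_INSN (next) == bb) ? next : NULL_RTX;
}
```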
667 possible. Unfortunately, there are way too many mode changes | 671 possible. Unfortunately, there are way too many mode changes |
668 that are perfectly valid, so we'd waste too much effort for | 672 that are perfectly valid, so we'd waste too much effort for |
669 little gain doing the checks here. Focus on catching invalid | 673 little gain doing the checks here. Focus on catching invalid |
670 transformations involving integer constants. */ | 674 transformations involving integer constants. */ |
671 if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT | 675 if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT |
672 && GET_CODE (newval) == CONST_INT) | 676 && CONST_INT_P (newval)) |
673 { | 677 { |
674 /* Sanity check that we're replacing oldval with a CONST_INT | 678 /* Sanity check that we're replacing oldval with a CONST_INT |
675 that is a valid sign-extension for the original mode. */ | 679 that is a valid sign-extension for the original mode. */ |
676 gcc_assert (INTVAL (newval) | 680 gcc_assert (INTVAL (newval) |
677 == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval))); | 681 == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval))); |
681 original mode would be gone. Unfortunately, we can't tell | 685 original mode would be gone. Unfortunately, we can't tell |
682 when do_SUBST is called to replace the operand thereof, so we | 686 when do_SUBST is called to replace the operand thereof, so we |
683 perform this test on oldval instead, checking whether an | 687 perform this test on oldval instead, checking whether an |
684 invalid replacement took place before we got here. */ | 688 invalid replacement took place before we got here. */ |
685 gcc_assert (!(GET_CODE (oldval) == SUBREG | 689 gcc_assert (!(GET_CODE (oldval) == SUBREG |
686 && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)); | 690 && CONST_INT_P (SUBREG_REG (oldval)))); |
687 gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND | 691 gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND |
688 && GET_CODE (XEXP (oldval, 0)) == CONST_INT)); | 692 && CONST_INT_P (XEXP (oldval, 0)))); |
689 } | 693 } |
690 | 694 |
691 if (undobuf.frees) | 695 if (undobuf.frees) |
692 buf = undobuf.frees, undobuf.frees = buf->next; | 696 buf = undobuf.frees, undobuf.frees = buf->next; |
693 else | 697 else |
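Much of the mechanical churn in this diff replaces the open-coded `GET_CODE (x) == CONST_INT` test with the `CONST_INT_P` predicate macro that rtl.h gained in this GCC generation. As far as I can tell its definition is exactly the old test, so the substitution is purely cosmetic:

```c
/* rtl.h predicate used throughout the right-hand column.  */
#define CONST_INT_P(X) (GET_CODE (X) == CONST_INT)
```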
761 #define SUBST_MODE(INTO, NEWVAL) do_SUBST_MODE(&(INTO), (NEWVAL)) | 765 #define SUBST_MODE(INTO, NEWVAL) do_SUBST_MODE(&(INTO), (NEWVAL)) |
762 | 766 |
763 /* Subroutine of try_combine. Determine whether the combine replacement | 767 /* Subroutine of try_combine. Determine whether the combine replacement |
764 patterns NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to | 768 patterns NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to |
765 insn_rtx_cost than the original instruction sequence I1, I2, I3 and | 769 insn_rtx_cost than the original instruction sequence I1, I2, I3 and |
766 undobuf.other_insn. Note that I1 and/or NEWI2PAT may be NULL_RTX. | 770 undobuf.other_insn. Note that I1 and/or NEWI2PAT may be NULL_RTX. |
767 NEWOTHERPAT and undobuf.other_insn may also both be NULL_RTX. This | 771 NEWOTHERPAT and undobuf.other_insn may also both be NULL_RTX. This |
768 function returns false if the costs of all instructions can be | 772 function returns false if the costs of all instructions can be |
769 estimated and the replacements are more expensive than the original | 773 estimated and the replacements are more expensive than the original |
770 sequence. */ | 774 sequence. */ |
771 | 775 |
906 | 910 |
907 /* Pass through each block from the end, recording the uses of each | 911 /* Pass through each block from the end, recording the uses of each |
908 register and establishing log links when def is encountered. | 912 register and establishing log links when def is encountered. |
909 Note that we do not clear next_use array in order to save time, | 913 Note that we do not clear next_use array in order to save time, |
910 so we have to test whether the use is in the same basic block as def. | 914 so we have to test whether the use is in the same basic block as def. |
911 | 915 |
912 There are a few cases below when we do not consider the definition or | 916 There are a few cases below when we do not consider the definition or |
913 usage -- these follow what the original flow.c did. Don't ask me why it is | 917 usage -- these follow what the original flow.c did. Don't ask me why it is |
914 done this way; I don't know and if it works, I don't want to know. */ | 918 done this way; I don't know and if it works, I don't want to know. */ |
915 | 919 |
916 FOR_EACH_BB (bb) | 920 FOR_EACH_BB (bb) |
917 { | 921 { |
918 FOR_BB_INSNS_REVERSE (bb, insn) | 922 FOR_BB_INSNS_REVERSE (bb, insn) |
919 { | 923 { |
920 if (!INSN_P (insn)) | 924 if (!NONDEBUG_INSN_P (insn)) |
921 continue; | 925 continue; |
922 | 926 |
923 /* Log links are created only once. */ | 927 /* Log links are created only once. */ |
924 gcc_assert (!LOG_LINKS (insn)); | 928 gcc_assert (!LOG_LINKS (insn)); |
925 | 929 |
1004 | 1008 |
1005 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | 1009 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
1006 if (INSN_P (insn)) | 1010 if (INSN_P (insn)) |
1007 free_INSN_LIST_list (&LOG_LINKS (insn)); | 1011 free_INSN_LIST_list (&LOG_LINKS (insn)); |
1008 } | 1012 } |
1009 | |
1010 | |
1011 | |
1012 | 1013 |
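In `create_log_links` above, `INSN_P` becomes `NONDEBUG_INSN_P`: with `-g` and variable-tracking assignments, GCC 4.5 streams DEBUG_INSNs through the RTL, and those satisfy `INSN_P` but must not grow log links or feed combination. A sketch of the predicates as I understand 4.5's rtl.h to define them:

```c
/* Debug insns carry var-location information only; filter them out of
   any walk that feeds optimization decisions.  */
#define DEBUG_INSN_P(X)    (GET_CODE (X) == DEBUG_INSN)
#define NONDEBUG_INSN_P(X) (INSN_P (X) && !DEBUG_INSN_P (X))
```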
1013 /* Main entry point for combiner. F is the first insn of the function. | 1014 /* Main entry point for combiner. F is the first insn of the function. |
1014 NREGS is the first unused pseudo-reg number. | 1015 NREGS is the first unused pseudo-reg number. |
1015 | 1016 |
1016 Return nonzero if the combiner has turned an indirect jump | 1017 Return nonzero if the combiner has turned an indirect jump |
1022 #ifdef HAVE_cc0 | 1023 #ifdef HAVE_cc0 |
1023 rtx prev; | 1024 rtx prev; |
1024 #endif | 1025 #endif |
1025 rtx links, nextlinks; | 1026 rtx links, nextlinks; |
1026 rtx first; | 1027 rtx first; |
1028 basic_block last_bb; | |
1027 | 1029 |
1028 int new_direct_jump_p = 0; | 1030 int new_direct_jump_p = 0; |
1029 | 1031 |
1030 for (first = f; first && !INSN_P (first); ) | 1032 for (first = f; first && !INSN_P (first); ) |
1031 first = NEXT_INSN (first); | 1033 first = NEXT_INSN (first); |
1052 | 1054 |
1053 /* Don't use reg_stat[].nonzero_bits when computing it. This can cause | 1055 /* Don't use reg_stat[].nonzero_bits when computing it. This can cause |
1054 problems when, for example, we have j <<= 1 in a loop. */ | 1056 problems when, for example, we have j <<= 1 in a loop. */ |
1055 | 1057 |
1056 nonzero_sign_valid = 0; | 1058 nonzero_sign_valid = 0; |
1059 label_tick = label_tick_ebb_start = 1; | |
1057 | 1060 |
1058 /* Scan all SETs and see if we can deduce anything about what | 1061 /* Scan all SETs and see if we can deduce anything about what |
1059 bits are known to be zero for some registers and how many copies | 1062 bits are known to be zero for some registers and how many copies |
1060 of the sign bit are known to exist for those registers. | 1063 of the sign bit are known to exist for those registers. |
1061 | 1064 |
1062 Also set any known values so that we can use it while searching | 1065 Also set any known values so that we can use it while searching |
1063 for what bits are known to be set. */ | 1066 for what bits are known to be set. */ |
1064 | 1067 |
1065 label_tick = label_tick_ebb_start = 1; | |
1066 | |
1067 setup_incoming_promotions (first); | 1068 setup_incoming_promotions (first); |
1069 /* Allow the entry block and the first block to fall into the same EBB. | |
1070 Conceptually the incoming promotions are assigned to the entry block. */ | |
1071 last_bb = ENTRY_BLOCK_PTR; | |
1068 | 1072 |
1069 create_log_links (); | 1073 create_log_links (); |
1070 FOR_EACH_BB (this_basic_block) | 1074 FOR_EACH_BB (this_basic_block) |
1071 { | 1075 { |
1072 optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block); | 1076 optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block); |
1073 last_call_luid = 0; | 1077 last_call_luid = 0; |
1074 mem_last_set = -1; | 1078 mem_last_set = -1; |
1079 | |
1075 label_tick++; | 1080 label_tick++; |
1081 if (!single_pred_p (this_basic_block) | |
1082 || single_pred (this_basic_block) != last_bb) | |
1083 label_tick_ebb_start = label_tick; | |
1084 last_bb = this_basic_block; | |
1085 | |
1076 FOR_BB_INSNS (this_basic_block, insn) | 1086 FOR_BB_INSNS (this_basic_block, insn) |
1077 if (INSN_P (insn) && BLOCK_FOR_INSN (insn)) | 1087 if (INSN_P (insn) && BLOCK_FOR_INSN (insn)) |
1078 { | 1088 { |
1079 subst_low_luid = DF_INSN_LUID (insn); | 1089 subst_low_luid = DF_INSN_LUID (insn); |
1080 subst_insn = insn; | 1090 subst_insn = insn; |
1096 optimize_this_for_speed_p); | 1106 optimize_this_for_speed_p); |
1097 if (dump_file) | 1107 if (dump_file) |
1098 fprintf(dump_file, "insn_cost %d: %d\n", | 1108 fprintf(dump_file, "insn_cost %d: %d\n", |
1099 INSN_UID (insn), INSN_COST (insn)); | 1109 INSN_UID (insn), INSN_COST (insn)); |
1100 } | 1110 } |
1101 else if (LABEL_P (insn)) | |
1102 label_tick_ebb_start = label_tick; | |
1103 } | 1111 } |
1104 | 1112 |
1105 nonzero_sign_valid = 1; | 1113 nonzero_sign_valid = 1; |
1106 | 1114 |
1107 /* Now scan all the insns in forward order. */ | 1115 /* Now scan all the insns in forward order. */ |
1108 | |
1109 label_tick = label_tick_ebb_start = 1; | 1116 label_tick = label_tick_ebb_start = 1; |
1110 init_reg_last (); | 1117 init_reg_last (); |
1111 setup_incoming_promotions (first); | 1118 setup_incoming_promotions (first); |
1119 last_bb = ENTRY_BLOCK_PTR; | |
1112 | 1120 |
1113 FOR_EACH_BB (this_basic_block) | 1121 FOR_EACH_BB (this_basic_block) |
1114 { | 1122 { |
1115 optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block); | 1123 optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block); |
1116 last_call_luid = 0; | 1124 last_call_luid = 0; |
1117 mem_last_set = -1; | 1125 mem_last_set = -1; |
1126 | |
1118 label_tick++; | 1127 label_tick++; |
1128 if (!single_pred_p (this_basic_block) | |
1129 || single_pred (this_basic_block) != last_bb) | |
1130 label_tick_ebb_start = label_tick; | |
1131 last_bb = this_basic_block; | |
1132 | |
1119 rtl_profile_for_bb (this_basic_block); | 1133 rtl_profile_for_bb (this_basic_block); |
1120 for (insn = BB_HEAD (this_basic_block); | 1134 for (insn = BB_HEAD (this_basic_block); |
1121 insn != NEXT_INSN (BB_END (this_basic_block)); | 1135 insn != NEXT_INSN (BB_END (this_basic_block)); |
1122 insn = next ? next : NEXT_INSN (insn)) | 1136 insn = next ? next : NEXT_INSN (insn)) |
1123 { | 1137 { |
1124 next = 0; | 1138 next = 0; |
1125 if (INSN_P (insn)) | 1139 if (NONDEBUG_INSN_P (insn)) |
1126 { | 1140 { |
1127 /* See if we know about function return values before this | 1141 /* See if we know about function return values before this |
1128 insn based upon SUBREG flags. */ | 1142 insn based upon SUBREG flags. */ |
1129 check_promoted_subreg (insn, PATTERN (insn)); | 1143 check_promoted_subreg (insn, PATTERN (insn)); |
1130 | 1144 |
1266 record_dead_and_set_regs (insn); | 1280 record_dead_and_set_regs (insn); |
1267 | 1281 |
1268 retry: | 1282 retry: |
1269 ; | 1283 ; |
1270 } | 1284 } |
1271 else if (LABEL_P (insn)) | |
1272 label_tick_ebb_start = label_tick; | |
1273 } | 1285 } |
1274 } | 1286 } |
1275 | 1287 |
1276 default_rtl_profile (); | 1288 default_rtl_profile (); |
1277 clear_log_links (); | 1289 clear_log_links (); |
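Both scanning passes in `combine_instructions` above now advance `label_tick_ebb_start` at extended-basic-block boundaries derived from the CFG, rather than at `CODE_LABEL`s. The test appears inlined twice in the diff; factored into a hypothetical helper (not in combine.c) it reads:

```c
/* A block continues the current extended basic block only when its
   sole predecessor is the block scanned immediately before it.  */
static bool
starts_new_ebb_p (basic_block bb, basic_block last_bb)
{
  return !single_pred_p (bb) || single_pred (bb) != last_bb;
}
```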
1326 setup_incoming_promotions (rtx first) | 1338 setup_incoming_promotions (rtx first) |
1327 { | 1339 { |
1328 tree arg; | 1340 tree arg; |
1329 bool strictly_local = false; | 1341 bool strictly_local = false; |
1330 | 1342 |
1331 if (!targetm.calls.promote_function_args (TREE_TYPE (cfun->decl))) | |
1332 return; | |
1333 | |
1334 for (arg = DECL_ARGUMENTS (current_function_decl); arg; | 1343 for (arg = DECL_ARGUMENTS (current_function_decl); arg; |
1335 arg = TREE_CHAIN (arg)) | 1344 arg = TREE_CHAIN (arg)) |
1336 { | 1345 { |
1337 rtx reg = DECL_INCOMING_RTL (arg); | 1346 rtx x, reg = DECL_INCOMING_RTL (arg); |
1338 int uns1, uns3; | 1347 int uns1, uns3; |
1339 enum machine_mode mode1, mode2, mode3, mode4; | 1348 enum machine_mode mode1, mode2, mode3, mode4; |
1340 | 1349 |
1341 /* Only continue if the incoming argument is in a register. */ | 1350 /* Only continue if the incoming argument is in a register. */ |
1342 if (!REG_P (reg)) | 1351 if (!REG_P (reg)) |
1356 /* The mode and signedness of the argument after any source language and | 1365 /* The mode and signedness of the argument after any source language and |
1357 TARGET_PROMOTE_PROTOTYPES-driven promotions. */ | 1366 TARGET_PROMOTE_PROTOTYPES-driven promotions. */ |
1358 mode2 = TYPE_MODE (DECL_ARG_TYPE (arg)); | 1367 mode2 = TYPE_MODE (DECL_ARG_TYPE (arg)); |
1359 uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg)); | 1368 uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg)); |
1360 | 1369 |
1361 /* The mode and signedness of the argument as it is actually passed, | 1370 /* The mode and signedness of the argument as it is actually passed, |
1362 after any TARGET_PROMOTE_FUNCTION_ARGS-driven ABI promotions. */ | 1371 after any TARGET_PROMOTE_FUNCTION_ARGS-driven ABI promotions. */ |
1363 mode3 = promote_mode (DECL_ARG_TYPE (arg), mode2, &uns3, 1); | 1372 mode3 = promote_function_mode (DECL_ARG_TYPE (arg), mode2, &uns3, |
1373 TREE_TYPE (cfun->decl), 0); | |
1364 | 1374 |
1365 /* The mode of the register in which the argument is being passed. */ | 1375 /* The mode of the register in which the argument is being passed. */ |
1366 mode4 = GET_MODE (reg); | 1376 mode4 = GET_MODE (reg); |
1367 | 1377 |
1368 /* Eliminate sign extensions in the callee when possible. Only | 1378 /* Eliminate sign extensions in the callee when: |
1369 do this when: | 1379 (a) A mode promotion has occurred; */ |
1370 (a) a mode promotion has occurred; | 1380 if (mode1 == mode3) |
1371 (b) the mode of the register is the same as the mode of | 1381 continue; |
1372 the argument as it is passed; and | 1382 /* (b) The mode of the register is the same as the mode of |
1373 (c) the signedness does not change across any of the promotions; and | 1383 the argument as it is passed; */ |
1374 (d) when no language-level promotions (which we cannot guarantee | 1384 if (mode3 != mode4) |
1375 will have been done by an external caller) are necessary, | 1385 continue; |
1376 unless we know that this function is only ever called from | 1386 /* (c) There's no language level extension; */ |
1377 the current compilation unit -- all of whose call sites will | 1387 if (mode1 == mode2) |
1378 do the mode1 --> mode2 promotion. */ | 1388 ; |
1379 if (mode1 != mode3 | 1389 /* (c.1) All callers are from the current compilation unit. If that's |
1380 && mode3 == mode4 | 1390 the case we don't have to rely on an ABI, we only have to know |
1381 && uns1 == uns3 | 1391 what we're generating right now, and we know that we will do the |
1382 && (mode1 == mode2 || strictly_local)) | 1392 mode1 to mode2 promotion with the given sign. */ |
1383 { | 1393 else if (!strictly_local) |
1384 /* Record that the value was promoted from mode1 to mode3, | 1394 continue; |
1385 so that any sign extension at the head of the current | 1395 /* (c.2) The combination of the two promotions is useful. This is |
1386 function may be eliminated. */ | 1396 true when the signs match, or if the first promotion is unsigned. |
1387 rtx x; | 1397 In the latter case, (sign_extend (zero_extend x)) is the same as |
1388 x = gen_rtx_CLOBBER (mode1, const0_rtx); | 1398 (zero_extend (zero_extend x)), so make sure to force UNS3 true. */ |
1389 x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x); | 1399 else if (uns1) |
1390 record_value_for_reg (reg, first, x); | 1400 uns3 = true; |
1391 } | 1401 else if (uns3) |
1402 continue; | |
1403 | |
1404 /* Record that the value was promoted from mode1 to mode3, | |
1405 so that any sign extension at the head of the current | |
1406 function may be eliminated. */ | |
1407 x = gen_rtx_CLOBBER (mode1, const0_rtx); | |
1408 x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x); | |
1409 record_value_for_reg (reg, first, x); | |
1392 } | 1410 } |
1393 } | 1411 } |
1394 | 1412 |
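The rewritten condition chain in `setup_incoming_promotions` keys case (c.2) on an arithmetic fact: a value already zero-extended has a clear high bit, so a following sign extension and a following zero extension produce the same bits. A small standalone C check of that fact (plain C, not GCC internals):

```c
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (unsigned v = 0; v <= 0xFF; v++)
    {
      uint16_t z = (uint16_t) v;           /* zero_extend QI -> HI */
      int32_t  s = (int32_t) (int16_t) z;  /* sign_extend HI -> SI */
      uint32_t u = (uint32_t) z;           /* zero_extend HI -> SI */
      /* The high bit of Z is clear, so both extensions agree; this is
         why the code above can simply force UNS3 true.  */
      assert ((uint32_t) s == u);
    }
  return 0;
}
```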
1395 /* Called via note_stores. If X is a pseudo that is narrower than | 1413 /* Called via note_stores. If X is a pseudo that is narrower than |
1396 HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero. | 1414 HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero. |
1483 | 1501 |
1484 ??? For 2.5, try to tighten up the MD files in this regard | 1502 ??? For 2.5, try to tighten up the MD files in this regard |
1485 instead of this kludge. */ | 1503 instead of this kludge. */ |
1486 | 1504 |
1487 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD | 1505 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD |
1488 && GET_CODE (src) == CONST_INT | 1506 && CONST_INT_P (src) |
1489 && INTVAL (src) > 0 | 1507 && INTVAL (src) > 0 |
1490 && 0 != (INTVAL (src) | 1508 && 0 != (INTVAL (src) |
1491 & ((HOST_WIDE_INT) 1 | 1509 & ((HOST_WIDE_INT) 1 |
1492 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) | 1510 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) |
1493 src = GEN_INT (INTVAL (src) | 1511 src = GEN_INT (INTVAL (src) |
1557 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET) | 1575 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET) |
1558 { | 1576 { |
1559 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++) | 1577 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++) |
1560 { | 1578 { |
1561 rtx elt = XVECEXP (PATTERN (insn), 0, i); | 1579 rtx elt = XVECEXP (PATTERN (insn), 0, i); |
1562 rtx note; | |
1563 | 1580 |
1564 switch (GET_CODE (elt)) | 1581 switch (GET_CODE (elt)) |
1565 { | 1582 { |
1566 /* This is important to combine floating point insns | 1583 /* This is important to combine floating point insns |
1567 for the SH4 port. */ | 1584 for the SH4 port. */ |
1608 | 1625 |
1609 case SET: | 1626 case SET: |
1610 /* Ignore SETs whose result isn't used but not those that | 1627 /* Ignore SETs whose result isn't used but not those that |
1611 have side-effects. */ | 1628 have side-effects. */ |
1612 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt)) | 1629 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt)) |
1613 && (!(note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) | 1630 && insn_nothrow_p (insn) |
1614 || INTVAL (XEXP (note, 0)) <= 0) | 1631 && !side_effects_p (elt)) |
1615 && ! side_effects_p (elt)) | |
1616 break; | 1632 break; |
1617 | 1633 |
1618 /* If we have already found a SET, this is a second one and | 1634 /* If we have already found a SET, this is a second one and |
1619 so we cannot combine with this insn. */ | 1635 so we cannot combine with this insn. */ |
1620 if (set) | 1636 if (set) |
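Here, and again in `try_combine` further down, the open-coded REG_EH_REGION note inspection is replaced by `insn_nothrow_p`, a predicate added by the 4.5 exception-handling rework (in except.c, as far as I can tell). A sketch of roughly what the old open-coded test computed, assuming the 4.4-era note encoding (hypothetical helper name):

```c
static bool
old_style_nothrow_p (const_rtx insn)
{
  /* No EH-region note, or a non-positive region number, meant the insn
     could not throw under the pre-4.5 encoding.  */
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  return note == NULL_RTX || INTVAL (XEXP (note, 0)) <= 0;
}
```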
1955 { | 1971 { |
1956 case MOD: case DIV: case UMOD: case UDIV: | 1972 case MOD: case DIV: case UMOD: case UDIV: |
1957 return 1; | 1973 return 1; |
1958 | 1974 |
1959 case MULT: | 1975 case MULT: |
1960 return ! (GET_CODE (XEXP (x, 1)) == CONST_INT | 1976 return ! (CONST_INT_P (XEXP (x, 1)) |
1961 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0); | 1977 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0); |
1962 default: | 1978 default: |
1963 if (BINARY_P (x)) | 1979 if (BINARY_P (x)) |
1964 return contains_muldiv (XEXP (x, 0)) | 1980 return contains_muldiv (XEXP (x, 0)) |
1965 || contains_muldiv (XEXP (x, 1)); | 1981 || contains_muldiv (XEXP (x, 1)); |
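`contains_muldiv` deliberately exempts MULT by an exact power of two, since that folds to a shift rather than a real multiply. A standalone sketch of the power-of-two test that `exact_log2` performs (GCC's own version handles HOST_WIDE_INT; this one assumes unsigned 64-bit values and GCC builtins):

```c
/* Return log2 of V if V is a power of two, else -1.  */
static int
exact_log2_sketch (unsigned long long v)
{
  if (v == 0 || (v & (v - 1)) != 0)
    return -1;
  return __builtin_ctzll (v);
}
```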
2154 x = XEXP (x, 0); | 2170 x = XEXP (x, 0); |
2155 | 2171 |
2156 return GET_CODE (x) == SUBREG | 2172 return GET_CODE (x) == SUBREG |
2157 && SUBREG_REG (x) == reg | 2173 && SUBREG_REG (x) == reg |
2158 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT; | 2174 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT; |
2175 } | |
2176 | |
2177 #ifdef AUTO_INC_DEC | |
2178 /* Replace auto-increment addressing modes with explicit operations to | |
2179 access the same addresses without modifying the corresponding | |
2180 registers. If AFTER holds, SRC is meant to be reused after the | |
2181 side effect, otherwise it is to be reused before that. */ | |
2182 | |
2183 static rtx | |
2184 cleanup_auto_inc_dec (rtx src, bool after, enum machine_mode mem_mode) | |
2185 { | |
2186 rtx x = src; | |
2187 const RTX_CODE code = GET_CODE (x); | |
2188 int i; | |
2189 const char *fmt; | |
2190 | |
2191 switch (code) | |
2192 { | |
2193 case REG: | |
2194 case CONST_INT: | |
2195 case CONST_DOUBLE: | |
2196 case CONST_FIXED: | |
2197 case CONST_VECTOR: | |
2198 case SYMBOL_REF: | |
2199 case CODE_LABEL: | |
2200 case PC: | |
2201 case CC0: | |
2202 case SCRATCH: | |
2203 /* SCRATCH must be shared because they represent distinct values. */ | |
2204 return x; | |
2205 case CLOBBER: | |
2206 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER) | |
2207 return x; | |
2208 break; | |
2209 | |
2210 case CONST: | |
2211 if (shared_const_p (x)) | |
2212 return x; | |
2213 break; | |
2214 | |
2215 case MEM: | |
2216 mem_mode = GET_MODE (x); | |
2217 break; | |
2218 | |
2219 case PRE_INC: | |
2220 case PRE_DEC: | |
2221 case POST_INC: | |
2222 case POST_DEC: | |
2223 gcc_assert (mem_mode != VOIDmode && mem_mode != BLKmode); | |
2224 if (after == (code == PRE_INC || code == PRE_DEC)) | |
2225 x = cleanup_auto_inc_dec (XEXP (x, 0), after, mem_mode); | |
2226 else | |
2227 x = gen_rtx_PLUS (GET_MODE (x), | |
2228 cleanup_auto_inc_dec (XEXP (x, 0), after, mem_mode), | |
2229 GEN_INT ((code == PRE_INC || code == POST_INC) | |
2230 ? GET_MODE_SIZE (mem_mode) | |
2231 : -GET_MODE_SIZE (mem_mode))); | |
2232 return x; | |
2233 | |
2234 case PRE_MODIFY: | |
2235 case POST_MODIFY: | |
2236 if (after == (code == PRE_MODIFY)) | |
2237 x = XEXP (x, 0); | |
2238 else | |
2239 x = XEXP (x, 1); | |
2240 return cleanup_auto_inc_dec (x, after, mem_mode); | |
2241 | |
2242 default: | |
2243 break; | |
2244 } | |
2245 | |
2246 /* Copy the various flags, fields, and other information. We assume | |
2247 that all fields need copying, and then clear the fields that should | |
2248 not be copied. That is the sensible default behavior, and forces | |
2249 us to explicitly document why we are *not* copying a flag. */ | |
2250 x = shallow_copy_rtx (x); | |
2251 | |
2252 /* We do not copy the USED flag, which is used as a mark bit during | |
2253 walks over the RTL. */ | |
2254 RTX_FLAG (x, used) = 0; | |
2255 | |
2256 /* We do not copy FRAME_RELATED for INSNs. */ | |
2257 if (INSN_P (x)) | |
2258 RTX_FLAG (x, frame_related) = 0; | |
2259 | |
2260 fmt = GET_RTX_FORMAT (code); | |
2261 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2262 if (fmt[i] == 'e') | |
2263 XEXP (x, i) = cleanup_auto_inc_dec (XEXP (x, i), after, mem_mode); | |
2264 else if (fmt[i] == 'E' || fmt[i] == 'V') | |
2265 { | |
2266 int j; | |
2267 XVEC (x, i) = rtvec_alloc (XVECLEN (x, i)); | |
2268 for (j = 0; j < XVECLEN (x, i); j++) | |
2269 XVECEXP (x, i, j) | |
2270 = cleanup_auto_inc_dec (XVECEXP (src, i, j), after, mem_mode); | |
2271 } | |
2272 | |
2273 return x; | |
2274 } | |
2275 | |
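The new `cleanup_auto_inc_dec` above rewrites auto-increment addresses into side-effect-free equivalents so an expression can be safely reused in debug insns. A worked example for PRE_INC in SImode (4-byte units), following the branch logic above; the RTL is illustrative, not taken from a dump:

```c
/* Reused BEFORE the side effect (after == false): the register still
   holds the pre-increment value, so the address must add the size:

     (mem:SI (pre_inc:SI (reg:SI 100)))
       ==> (mem:SI (plus:SI (reg:SI 100) (const_int 4)))

   Reused AFTER the side effect (after == true): the register already
   holds the incremented value, which is exactly the accessed address:

     (mem:SI (pre_inc:SI (reg:SI 100)))
       ==> (mem:SI (reg:SI 100))                                      */
```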
2276 /* Auxiliary data structure for propagate_for_debug_stmt. */ | |
2277 | |
2278 struct rtx_subst_pair | |
2279 { | |
2280 rtx to; | |
2281 bool adjusted; | |
2282 bool after; | |
2283 }; | |
2284 | |
2285 /* DATA points to an rtx_subst_pair. Return the value that should be | |
2286 substituted. */ | |
2287 | |
2288 static rtx | |
2289 propagate_for_debug_subst (rtx from ATTRIBUTE_UNUSED, void *data) | |
2290 { | |
2291 struct rtx_subst_pair *pair = (struct rtx_subst_pair *)data; | |
2292 | |
2293 if (!pair->adjusted) | |
2294 { | |
2295 pair->adjusted = true; | |
2296 pair->to = cleanup_auto_inc_dec (pair->to, pair->after, VOIDmode); | |
2297 return pair->to; | |
2298 } | |
2299 return copy_rtx (pair->to); | |
2300 } | |
2301 #endif | |
2302 | |
2303 /* Replace occurrences of DEST with SRC in DEBUG_INSNs between INSN | |
2304 and LAST. If MOVE holds, debug insns must also be moved past | |
2305 LAST. */ | |
2306 | |
2307 static void | |
2308 propagate_for_debug (rtx insn, rtx last, rtx dest, rtx src, bool move) | |
2309 { | |
2310 rtx next, move_pos = move ? last : NULL_RTX, loc; | |
2311 | |
2312 #ifdef AUTO_INC_DEC | |
2313 struct rtx_subst_pair p; | |
2314 p.to = src; | |
2315 p.adjusted = false; | |
2316 p.after = move; | |
2317 #endif | |
2318 | |
2319 next = NEXT_INSN (insn); | |
2320 while (next != last) | |
2321 { | |
2322 insn = next; | |
2323 next = NEXT_INSN (insn); | |
2324 if (DEBUG_INSN_P (insn)) | |
2325 { | |
2326 #ifdef AUTO_INC_DEC | |
2327 loc = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn), | |
2328 dest, propagate_for_debug_subst, &p); | |
2329 #else | |
2330 loc = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn), dest, src); | |
2331 #endif | |
2332 if (loc == INSN_VAR_LOCATION_LOC (insn)) | |
2333 continue; | |
2334 INSN_VAR_LOCATION_LOC (insn) = loc; | |
2335 if (move_pos) | |
2336 { | |
2337 remove_insn (insn); | |
2338 PREV_INSN (insn) = NEXT_INSN (insn) = NULL_RTX; | |
2339 move_pos = emit_debug_insn_after (insn, move_pos); | |
2340 } | |
2341 else | |
2342 df_insn_rescan (insn); | |
2343 } | |
2344 } | |
2345 } | |
2346 | |
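`propagate_for_debug` is the consumer of the machinery above: `try_combine`, further down in this diff, calls it whenever an insn whose destination may still appear in debug insns is rewritten or deleted. The calls added below follow this shape:

```c
/* Before deleting i2, rewrite any DEBUG_INSN between i2 and i3 that
   still mentions i2's old destination in terms of its source.  */
if (MAY_HAVE_DEBUG_INSNS && i2src)
  propagate_for_debug (i2, i3, i2dest, i2src, false);
```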
2347 /* Delete the unconditional jump INSN and adjust the CFG correspondingly. | |
2348 Note that the INSN should be deleted *after* removing dead edges, so | |
2349 that the kept edge is the fallthrough edge for a (set (pc) (pc)) | |
2350 but not for a (set (pc) (label_ref FOO)). */ | |
2351 | |
2352 static void | |
2353 update_cfg_for_uncondjump (rtx insn) | |
2354 { | |
2355 basic_block bb = BLOCK_FOR_INSN (insn); | |
2356 bool at_end = (BB_END (bb) == insn); | |
2357 | |
2358 if (at_end) | |
2359 purge_dead_edges (bb); | |
2360 | |
2361 delete_insn (insn); | |
2362 if (at_end && EDGE_COUNT (bb->succs) == 1) | |
2363 single_succ_edge (bb)->flags |= EDGE_FALLTHRU; | |
2159 } | 2364 } |
2160 | 2365 |
2161 | 2366 |
2162 /* Try to combine the insns I1 and I2 into I3. | 2367 /* Try to combine the insns I1 and I2 into I3. |
2163 Here I1 and I2 appear earlier than I3. | 2368 Here I1 and I2 appear earlier than I3. |
2193 /* Contains I3 if the destination of I3 is used in its source, which means | 2398 /* Contains I3 if the destination of I3 is used in its source, which means |
2194 that the old life of I3 is being killed. If that usage is placed into | 2399 that the old life of I3 is being killed. If that usage is placed into |
2195 I2 and not in I3, a REG_DEAD note must be made. */ | 2400 I2 and not in I3, a REG_DEAD note must be made. */ |
2196 rtx i3dest_killed = 0; | 2401 rtx i3dest_killed = 0; |
2197 /* SET_DEST and SET_SRC of I2 and I1. */ | 2402 /* SET_DEST and SET_SRC of I2 and I1. */ |
2198 rtx i2dest, i2src, i1dest = 0, i1src = 0; | 2403 rtx i2dest = 0, i2src = 0, i1dest = 0, i1src = 0; |
2404 /* Set if I2DEST was reused as a scratch register. */ | |
2405 bool i2scratch = false; | |
2199 /* PATTERN (I1) and PATTERN (I2), or a copy of it in certain cases. */ | 2406 /* PATTERN (I1) and PATTERN (I2), or a copy of it in certain cases. */ |
2200 rtx i1pat = 0, i2pat = 0; | 2407 rtx i1pat = 0, i2pat = 0; |
2201 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */ | 2408 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */ |
2202 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0; | 2409 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0; |
2203 int i2dest_killed = 0, i1dest_killed = 0; | 2410 int i2dest_killed = 0, i1dest_killed = 0; |
2230 undobuf.other_insn = 0; | 2437 undobuf.other_insn = 0; |
2231 | 2438 |
2232 /* Reset the hard register usage information. */ | 2439 /* Reset the hard register usage information. */ |
2233 CLEAR_HARD_REG_SET (newpat_used_regs); | 2440 CLEAR_HARD_REG_SET (newpat_used_regs); |
2234 | 2441 |
2442 if (dump_file && (dump_flags & TDF_DETAILS)) | |
2443 { | |
2444 if (i1) | |
2445 fprintf (dump_file, "\nTrying %d, %d -> %d:\n", | |
2446 INSN_UID (i1), INSN_UID (i2), INSN_UID (i3)); | |
2447 else | |
2448 fprintf (dump_file, "\nTrying %d -> %d:\n", | |
2449 INSN_UID (i2), INSN_UID (i3)); | |
2450 } | |
2451 | |
2235 /* If I1 and I2 both feed I3, they can be in any order. To simplify the | 2452 /* If I1 and I2 both feed I3, they can be in any order. To simplify the |
2236 code below, set I1 to be the earlier of the two insns. */ | 2453 code below, set I1 to be the earlier of the two insns. */ |
2237 if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2)) | 2454 if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2)) |
2238 temp = i1, i1 = i2, i2 = temp; | 2455 temp = i1, i1 = i2, i2 = temp; |
2239 | 2456 |
2267 the resulting insn isn't likely to be recognized anyway. */ | 2484 the resulting insn isn't likely to be recognized anyway. */ |
2268 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT | 2485 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT |
2269 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART | 2486 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART |
2270 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)), | 2487 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)), |
2271 SET_DEST (PATTERN (i3))) | 2488 SET_DEST (PATTERN (i3))) |
2272 && next_real_insn (i2) == i3) | 2489 && next_active_insn (i2) == i3) |
2273 { | 2490 { |
2274 rtx p2 = PATTERN (i2); | 2491 rtx p2 = PATTERN (i2); |
2275 | 2492 |
2276 /* Make sure that the destination of I3, | 2493 /* Make sure that the destination of I3, |
2277 which we are going to substitute into one output of I2, | 2494 which we are going to substitute into one output of I2, |
2300 | 2517 |
2301 subst_insn = i3; | 2518 subst_insn = i3; |
2302 subst_low_luid = DF_INSN_LUID (i2); | 2519 subst_low_luid = DF_INSN_LUID (i2); |
2303 | 2520 |
2304 added_sets_2 = added_sets_1 = 0; | 2521 added_sets_2 = added_sets_1 = 0; |
2522 i2src = SET_DEST (PATTERN (i3)); | |
2305 i2dest = SET_SRC (PATTERN (i3)); | 2523 i2dest = SET_SRC (PATTERN (i3)); |
2306 i2dest_killed = dead_or_set_p (i2, i2dest); | 2524 i2dest_killed = dead_or_set_p (i2, i2dest); |
2307 | 2525 |
2308 /* Replace the dest in I2 with our dest and make the resulting | 2526 /* Replace the dest in I2 with our dest and make the resulting |
2309 insn the new pattern for I3. Then skip to where we | 2527 insn the new pattern for I3. Then skip to where we |
2320 /* If I2 is setting a pseudo to a constant and I3 is setting some | 2538 /* If I2 is setting a pseudo to a constant and I3 is setting some |
2321 sub-part of it to another constant, merge them by making a new | 2539 sub-part of it to another constant, merge them by making a new |
2322 constant. */ | 2540 constant. */ |
2323 if (i1 == 0 | 2541 if (i1 == 0 |
2324 && (temp = single_set (i2)) != 0 | 2542 && (temp = single_set (i2)) != 0 |
2325 && (GET_CODE (SET_SRC (temp)) == CONST_INT | 2543 && (CONST_INT_P (SET_SRC (temp)) |
2326 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE) | 2544 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE) |
2327 && GET_CODE (PATTERN (i3)) == SET | 2545 && GET_CODE (PATTERN (i3)) == SET |
2328 && (GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT | 2546 && (CONST_INT_P (SET_SRC (PATTERN (i3))) |
2329 || GET_CODE (SET_SRC (PATTERN (i3))) == CONST_DOUBLE) | 2547 || GET_CODE (SET_SRC (PATTERN (i3))) == CONST_DOUBLE) |
2330 && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp))) | 2548 && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp))) |
2331 { | 2549 { |
2332 rtx dest = SET_DEST (PATTERN (i3)); | 2550 rtx dest = SET_DEST (PATTERN (i3)); |
2333 int offset = -1; | 2551 int offset = -1; |
2334 int width = 0; | 2552 int width = 0; |
2335 | 2553 |
2336 if (GET_CODE (dest) == ZERO_EXTRACT) | 2554 if (GET_CODE (dest) == ZERO_EXTRACT) |
2337 { | 2555 { |
2338 if (GET_CODE (XEXP (dest, 1)) == CONST_INT | 2556 if (CONST_INT_P (XEXP (dest, 1)) |
2339 && GET_CODE (XEXP (dest, 2)) == CONST_INT) | 2557 && CONST_INT_P (XEXP (dest, 2))) |
2340 { | 2558 { |
2341 width = INTVAL (XEXP (dest, 1)); | 2559 width = INTVAL (XEXP (dest, 1)); |
2342 offset = INTVAL (XEXP (dest, 2)); | 2560 offset = INTVAL (XEXP (dest, 2)); |
2343 dest = XEXP (dest, 0); | 2561 dest = XEXP (dest, 0); |
2344 if (BITS_BIG_ENDIAN) | 2562 if (BITS_BIG_ENDIAN) |
2374 HOST_WIDE_INT mhi, ohi, ihi; | 2592 HOST_WIDE_INT mhi, ohi, ihi; |
2375 HOST_WIDE_INT mlo, olo, ilo; | 2593 HOST_WIDE_INT mlo, olo, ilo; |
2376 rtx inner = SET_SRC (PATTERN (i3)); | 2594 rtx inner = SET_SRC (PATTERN (i3)); |
2377 rtx outer = SET_SRC (temp); | 2595 rtx outer = SET_SRC (temp); |
2378 | 2596 |
2379 if (GET_CODE (outer) == CONST_INT) | 2597 if (CONST_INT_P (outer)) |
2380 { | 2598 { |
2381 olo = INTVAL (outer); | 2599 olo = INTVAL (outer); |
2382 ohi = olo < 0 ? -1 : 0; | 2600 ohi = olo < 0 ? -1 : 0; |
2383 } | 2601 } |
2384 else | 2602 else |
2385 { | 2603 { |
2386 olo = CONST_DOUBLE_LOW (outer); | 2604 olo = CONST_DOUBLE_LOW (outer); |
2387 ohi = CONST_DOUBLE_HIGH (outer); | 2605 ohi = CONST_DOUBLE_HIGH (outer); |
2388 } | 2606 } |
2389 | 2607 |
2390 if (GET_CODE (inner) == CONST_INT) | 2608 if (CONST_INT_P (inner)) |
2391 { | 2609 { |
2392 ilo = INTVAL (inner); | 2610 ilo = INTVAL (inner); |
2393 ihi = ilo < 0 ? -1 : 0; | 2611 ihi = ilo < 0 ? -1 : 0; |
2394 } | 2612 } |
2395 else | 2613 else |
2443 subst_low_luid = DF_INSN_LUID (i2); | 2661 subst_low_luid = DF_INSN_LUID (i2); |
2444 added_sets_2 = added_sets_1 = 0; | 2662 added_sets_2 = added_sets_1 = 0; |
2445 i2dest = SET_DEST (temp); | 2663 i2dest = SET_DEST (temp); |
2446 i2dest_killed = dead_or_set_p (i2, i2dest); | 2664 i2dest_killed = dead_or_set_p (i2, i2dest); |
2447 | 2665 |
2448 /* Replace the source in I2 with the new constant and make the | |
2449 resulting insn the new pattern for I3. Then skip to | |
2450 where we validate the pattern. Everything was set up above. */ | |
2451 SUBST (SET_SRC (temp), | 2666 SUBST (SET_SRC (temp), |
2452 immed_double_const (olo, ohi, GET_MODE (SET_DEST (temp)))); | 2667 immed_double_const (olo, ohi, GET_MODE (SET_DEST (temp)))); |
2453 | 2668 |
2454 newpat = PATTERN (i2); | 2669 newpat = PATTERN (i2); |
2455 | |
2456 /* The dest of I3 has been replaced with the dest of I2. */ | |
2457 changed_i3_dest = 1; | |
2458 goto validate_replacement; | 2670 goto validate_replacement; |
2459 } | 2671 } |
2460 } | 2672 } |
2461 | 2673 |
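The constant-merging branch above pulls both constants apart into (low, high) word pairs (`olo`/`ohi`, `ilo`/`ihi`) and splices the inner constant into the outer one at the computed bit offset. Narrowed to 32 bits for brevity, the splice is plain mask arithmetic; a standalone C check with hypothetical values:

```c
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t outer = 0xAABBCCDDu;  /* constant set by I2 */
  uint32_t inner = 0x1122u;      /* constant I3 stores into a subword */
  int offset = 0, width = 16;
  uint32_t mask = ((1u << width) - 1) << offset;
  uint32_t merged = (outer & ~mask) | ((inner << offset) & mask);
  assert (merged == 0xAABB1122u);
  return 0;
}
```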
2462 #ifndef HAVE_cc0 | 2674 #ifndef HAVE_cc0 |
2824 XVECEXP (newpat, 0, --total_sets) | 3036 XVECEXP (newpat, 0, --total_sets) |
2825 = subst (i2pat, i1dest, i1src, 0, 0); | 3037 = subst (i2pat, i1dest, i1src, 0, 0); |
2826 } | 3038 } |
2827 } | 3039 } |
2828 | 3040 |
3041 /* We come here when we are replacing a destination in I2 with the | |
3042 destination of I3. */ | |
2829 validate_replacement: | 3043 validate_replacement: |
2830 | 3044 |
2831 /* Note which hard regs this insn has as inputs. */ | 3045 /* Note which hard regs this insn has as inputs. */ |
2832 mark_used_regs_combine (newpat); | 3046 mark_used_regs_combine (newpat); |
2833 | 3047 |
2872 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | 3086 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET |
2873 && asm_noperands (newpat) < 0) | 3087 && asm_noperands (newpat) < 0) |
2874 { | 3088 { |
2875 rtx set0 = XVECEXP (newpat, 0, 0); | 3089 rtx set0 = XVECEXP (newpat, 0, 0); |
2876 rtx set1 = XVECEXP (newpat, 0, 1); | 3090 rtx set1 = XVECEXP (newpat, 0, 1); |
2877 rtx note; | |
2878 | 3091 |
2879 if (((REG_P (SET_DEST (set1)) | 3092 if (((REG_P (SET_DEST (set1)) |
2880 && find_reg_note (i3, REG_UNUSED, SET_DEST (set1))) | 3093 && find_reg_note (i3, REG_UNUSED, SET_DEST (set1))) |
2881 || (GET_CODE (SET_DEST (set1)) == SUBREG | 3094 || (GET_CODE (SET_DEST (set1)) == SUBREG |
2882 && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1))))) | 3095 && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1))))) |
2883 && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX)) | 3096 && insn_nothrow_p (i3) |
2884 || INTVAL (XEXP (note, 0)) <= 0) | 3097 && !side_effects_p (SET_SRC (set1))) |
2885 && ! side_effects_p (SET_SRC (set1))) | |
2886 { | 3098 { |
2887 newpat = set0; | 3099 newpat = set0; |
2888 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); | 3100 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
2889 } | 3101 } |
2890 | 3102 |
2891 else if (((REG_P (SET_DEST (set0)) | 3103 else if (((REG_P (SET_DEST (set0)) |
2892 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0))) | 3104 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0))) |
2893 || (GET_CODE (SET_DEST (set0)) == SUBREG | 3105 || (GET_CODE (SET_DEST (set0)) == SUBREG |
2894 && find_reg_note (i3, REG_UNUSED, | 3106 && find_reg_note (i3, REG_UNUSED, |
2895 SUBREG_REG (SET_DEST (set0))))) | 3107 SUBREG_REG (SET_DEST (set0))))) |
2896 && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX)) | 3108 && insn_nothrow_p (i3) |
2897 || INTVAL (XEXP (note, 0)) <= 0) | 3109 && !side_effects_p (SET_SRC (set0))) |
2898 && ! side_effects_p (SET_SRC (set0))) | |
2899 { | 3110 { |
2900 newpat = set1; | 3111 newpat = set1; |
2901 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); | 3112 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
2902 | 3113 |
2903 if (insn_code_number >= 0) | 3114 if (insn_code_number >= 0) |
2976 undobuf.undos = buf->next; | 3187 undobuf.undos = buf->next; |
2977 buf->next = undobuf.frees; | 3188 buf->next = undobuf.frees; |
2978 undobuf.frees = buf; | 3189 undobuf.frees = buf; |
2979 } | 3190 } |
2980 } | 3191 } |
3192 | |
3193 i2scratch = m_split != 0; | |
2981 } | 3194 } |
2982 | 3195 |
2983 /* If recog_for_combine has discarded clobbers, try to use them | 3196 /* If recog_for_combine has discarded clobbers, try to use them |
2984 again for the split. */ | 3197 again for the split. */ |
2985 if (m_split == 0 && newpat_vec_with_clobbers) | 3198 if (m_split == 0 && newpat_vec_with_clobbers) |
3070 enum rtx_code split_code = GET_CODE (*split); | 3283 enum rtx_code split_code = GET_CODE (*split); |
3071 enum machine_mode split_mode = GET_MODE (*split); | 3284 enum machine_mode split_mode = GET_MODE (*split); |
3072 bool subst_done = false; | 3285 bool subst_done = false; |
3073 newi2pat = NULL_RTX; | 3286 newi2pat = NULL_RTX; |
3074 | 3287 |
3288 i2scratch = true; | |
3289 | |
3075 /* Get NEWDEST as a register in the proper mode. We have already | 3290 /* Get NEWDEST as a register in the proper mode. We have already |
3076 validated that we can do this. */ | 3291 validated that we can do this. */ |
3077 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode) | 3292 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode) |
3078 { | 3293 { |
3079 if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER) | 3294 if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER) |
3087 | 3302 |
3088 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to | 3303 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to |
3089 an ASHIFT. This can occur if it was inside a PLUS and hence | 3304 an ASHIFT. This can occur if it was inside a PLUS and hence |
3090 appeared to be a memory address. This is a kludge. */ | 3305 appeared to be a memory address. This is a kludge. */ |
3091 if (split_code == MULT | 3306 if (split_code == MULT |
3092 && GET_CODE (XEXP (*split, 1)) == CONST_INT | 3307 && CONST_INT_P (XEXP (*split, 1)) |
3093 && INTVAL (XEXP (*split, 1)) > 0 | 3308 && INTVAL (XEXP (*split, 1)) > 0 |
3094 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0) | 3309 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0) |
3095 { | 3310 { |
3096 SUBST (*split, gen_rtx_ASHIFT (split_mode, | 3311 SUBST (*split, gen_rtx_ASHIFT (split_mode, |
3097 XEXP (*split, 0), GEN_INT (i))); | 3312 XEXP (*split, 0), GEN_INT (i))); |
3372 { | 3587 { |
3373 undo_all (); | 3588 undo_all (); |
3374 return 0; | 3589 return 0; |
3375 } | 3590 } |
3376 | 3591 |
3592 if (MAY_HAVE_DEBUG_INSNS) | |
3593 { | |
3594 struct undo *undo; | |
3595 | |
3596 for (undo = undobuf.undos; undo; undo = undo->next) | |
3597 if (undo->kind == UNDO_MODE) | |
3598 { | |
3599 rtx reg = *undo->where.r; | |
3600 enum machine_mode new_mode = GET_MODE (reg); | |
3601 enum machine_mode old_mode = undo->old_contents.m; | |
3602 | |
3603 /* Temporarily revert mode back. */ | |
3604 adjust_reg_mode (reg, old_mode); | |
3605 | |
3606 if (reg == i2dest && i2scratch) | |
3607 { | |
3608 /* If we used i2dest as a scratch register with a | |
3609 different mode, substitute it for the original | |
3610 i2src while its original mode is temporarily | |
3611 restored, and then clear i2scratch so that we don't | |
3612 do it again later. */ | |
3613 propagate_for_debug (i2, i3, reg, i2src, false); | |
3614 i2scratch = false; | |
3615 /* Put back the new mode. */ | |
3616 adjust_reg_mode (reg, new_mode); | |
3617 } | |
3618 else | |
3619 { | |
3620 rtx tempreg = gen_raw_REG (old_mode, REGNO (reg)); | |
3621 rtx first, last; | |
3622 | |
3623 if (reg == i2dest) | |
3624 { | |
3625 first = i2; | |
3626 last = i3; | |
3627 } | |
3628 else | |
3629 { | |
3630 first = i3; | |
3631 last = undobuf.other_insn; | |
3632 gcc_assert (last); | |
3633 } | |
3634 | |
3635 /* We're dealing with a reg that changed mode but not | |
3636 meaning, so we want to turn it into a subreg for | |
3637 the new mode. However, because of REG sharing and | |
3638 because its mode had already changed, we have to do | |
3639 it in two steps. First, replace any debug uses of | |
3640 reg, with its original mode temporarily restored, | |
3641 with this copy we have created; then, replace the | |
3642 copy with the SUBREG of the original shared reg, | |
3643 once again changed to the new mode. */ | |
3644 propagate_for_debug (first, last, reg, tempreg, false); | |
3645 adjust_reg_mode (reg, new_mode); | |
3646 propagate_for_debug (first, last, tempreg, | |
3647 lowpart_subreg (old_mode, reg, new_mode), | |
3648 false); | |
3649 } | |
3650 } | |
3651 } | |
3652 | |
3377 /* If we will be able to accept this, we have made a | 3653 /* If we will be able to accept this, we have made a |
3378 change to the destination of I3. This requires us to | 3654 change to the destination of I3. This requires us to |
3379 do a few adjustments. */ | 3655 do a few adjustments. */ |
3380 | 3656 |
3381 if (changed_i3_dest) | 3657 if (changed_i3_dest) |
3562 LOG_LINKS (i2) = 0; | 3838 LOG_LINKS (i2) = 0; |
3563 REG_NOTES (i2) = 0; | 3839 REG_NOTES (i2) = 0; |
3564 | 3840 |
3565 if (newi2pat) | 3841 if (newi2pat) |
3566 { | 3842 { |
3843 if (MAY_HAVE_DEBUG_INSNS && i2scratch) | |
3844 propagate_for_debug (i2, i3, i2dest, i2src, false); | |
3567 INSN_CODE (i2) = i2_code_number; | 3845 INSN_CODE (i2) = i2_code_number; |
3568 PATTERN (i2) = newi2pat; | 3846 PATTERN (i2) = newi2pat; |
3569 } | 3847 } |
3570 else | 3848 else |
3571 SET_INSN_DELETED (i2); | 3849 { |
3850 if (MAY_HAVE_DEBUG_INSNS && i2src) | |
3851 propagate_for_debug (i2, i3, i2dest, i2src, i3_subst_into_i2); | |
3852 SET_INSN_DELETED (i2); | |
3853 } | |
3572 | 3854 |
3573 if (i1) | 3855 if (i1) |
3574 { | 3856 { |
3575 LOG_LINKS (i1) = 0; | 3857 LOG_LINKS (i1) = 0; |
3576 REG_NOTES (i1) = 0; | 3858 REG_NOTES (i1) = 0; |
3859 if (MAY_HAVE_DEBUG_INSNS) | |
3860 propagate_for_debug (i1, i3, i1dest, i1src, false); | |
3577 SET_INSN_DELETED (i1); | 3861 SET_INSN_DELETED (i1); |
3578 } | 3862 } |
3579 | 3863 |
3580 /* Get death notes for everything that is now used in either I3 or | 3864 /* Get death notes for everything that is now used in either I3 or |
3581 I2 and used to die in a previous insn. If we built two new | 3865 I2 and used to die in a previous insn. If we built two new |
3609 know these are REG_UNUSED and want them to go to the desired insn, | 3893 know these are REG_UNUSED and want them to go to the desired insn, |
3610 so we always pass it as i3. */ | 3894 so we always pass it as i3. */ |
3611 | 3895 |
3612 if (newi2pat && new_i2_notes) | 3896 if (newi2pat && new_i2_notes) |
3613 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX); | 3897 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX); |
3614 | 3898 |
3615 if (new_i3_notes) | 3899 if (new_i3_notes) |
3616 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX); | 3900 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX); |
3617 | 3901 |
3618 /* If I3DEST was used in I3SRC, it really died in I3. We may need to | 3902 /* If I3DEST was used in I3SRC, it really died in I3. We may need to |
3619 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets | 3903 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets |
3623 we discard it in distribute_notes, we will decrement it again. */ | 3907 we discard it in distribute_notes, we will decrement it again. */ |
3624 | 3908 |
3625 if (i3dest_killed) | 3909 if (i3dest_killed) |
3626 { | 3910 { |
3627 if (newi2pat && reg_set_p (i3dest_killed, newi2pat)) | 3911 if (newi2pat && reg_set_p (i3dest_killed, newi2pat)) |
3628 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed, | 3912 distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed, |
3629 NULL_RTX), | 3913 NULL_RTX), |
3630 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1); | 3914 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1); |
3631 else | 3915 else |
3632 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed, | 3916 distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed, |
3633 NULL_RTX), | 3917 NULL_RTX), |
3634 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, | 3918 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, |
3635 elim_i2, elim_i1); | 3919 elim_i2, elim_i1); |
3636 } | 3920 } |
3637 | 3921 |
3638 if (i2dest_in_i2src) | 3922 if (i2dest_in_i2src) |
3639 { | 3923 { |
3640 if (newi2pat && reg_set_p (i2dest, newi2pat)) | 3924 if (newi2pat && reg_set_p (i2dest, newi2pat)) |
3641 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX), | 3925 distribute_notes (alloc_reg_note (REG_DEAD, i2dest, NULL_RTX), |
3642 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX); | 3926 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX); |
3643 else | 3927 else |
3644 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX), | 3928 distribute_notes (alloc_reg_note (REG_DEAD, i2dest, NULL_RTX), |
3645 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, | 3929 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, |
3646 NULL_RTX, NULL_RTX); | 3930 NULL_RTX, NULL_RTX); |
3647 } | 3931 } |
3648 | 3932 |
3649 if (i1dest_in_i1src) | 3933 if (i1dest_in_i1src) |
3650 { | 3934 { |
3651 if (newi2pat && reg_set_p (i1dest, newi2pat)) | 3935 if (newi2pat && reg_set_p (i1dest, newi2pat)) |
3652 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX), | 3936 distribute_notes (alloc_reg_note (REG_DEAD, i1dest, NULL_RTX), |
3653 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX); | 3937 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX); |
3654 else | 3938 else |
3655 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX), | 3939 distribute_notes (alloc_reg_note (REG_DEAD, i1dest, NULL_RTX), |
3656 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, | 3940 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, |
3657 NULL_RTX, NULL_RTX); | 3941 NULL_RTX, NULL_RTX); |
3658 } | 3942 } |
3659 | 3943 |
3660 distribute_links (i3links); | 3944 distribute_links (i3links); |
3714 set_nonzero_bits_and_sign_copies() is important. Because newi2pat | 3998 set_nonzero_bits_and_sign_copies() is important. Because newi2pat |
3715 can affect nonzero_bits of newpat */ | 3999 can affect nonzero_bits of newpat */ |
3716 if (newi2pat) | 4000 if (newi2pat) |
3717 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL); | 4001 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL); |
3718 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL); | 4002 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL); |
3719 | |
3720 /* Set new_direct_jump_p if a new return or simple jump instruction | |
3721 has been created. | |
3722 | |
3723 If I3 is now an unconditional jump, ensure that it has a | |
3724 BARRIER following it since it may have initially been a | |
3725 conditional jump. It may also be the last nonnote insn. */ | |
3726 | |
3727 if (returnjump_p (i3) || any_uncondjump_p (i3)) | |
3728 { | |
3729 *new_direct_jump_p = 1; | |
3730 mark_jump_label (PATTERN (i3), i3, 0); | |
3731 | |
3732 if ((temp = next_nonnote_insn (i3)) == NULL_RTX | |
3733 || !BARRIER_P (temp)) | |
3734 emit_barrier_after (i3); | |
3735 } | |
3736 | |
3737 if (undobuf.other_insn != NULL_RTX | |
3738 && (returnjump_p (undobuf.other_insn) | |
3739 || any_uncondjump_p (undobuf.other_insn))) | |
3740 { | |
3741 *new_direct_jump_p = 1; | |
3742 | |
3743 if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX | |
3744 || !BARRIER_P (temp)) | |
3745 emit_barrier_after (undobuf.other_insn); | |
3746 } | |
3747 | |
3748 /* An NOOP jump does not need barrier, but it does need cleaning up | |
3749 of CFG. */ | |
3750 if (GET_CODE (newpat) == SET | |
3751 && SET_SRC (newpat) == pc_rtx | |
3752 && SET_DEST (newpat) == pc_rtx) | |
3753 *new_direct_jump_p = 1; | |
3754 } | 4003 } |
3755 | 4004 |
3756 if (undobuf.other_insn != NULL_RTX) | 4005 if (undobuf.other_insn != NULL_RTX) |
3757 { | 4006 { |
3758 if (dump_file) | 4007 if (dump_file) |
3759 { | 4008 { |
3760 fprintf (dump_file, "modifying other_insn "); | 4009 fprintf (dump_file, "modifying other_insn "); |
3790 fprintf (dump_file, "modifying insn i3 "); | 4039 fprintf (dump_file, "modifying insn i3 "); |
3791 dump_insn_slim (dump_file, i3); | 4040 dump_insn_slim (dump_file, i3); |
3792 } | 4041 } |
3793 df_insn_rescan (i3); | 4042 df_insn_rescan (i3); |
3794 } | 4043 } |
3795 | 4044 |
4045 /* Set new_direct_jump_p if a new return or simple jump instruction | |
4046 has been created. Adjust the CFG accordingly. */ | |
4047 | |
4048 if (returnjump_p (i3) || any_uncondjump_p (i3)) | |
4049 { | |
4050 *new_direct_jump_p = 1; | |
4051 mark_jump_label (PATTERN (i3), i3, 0); | |
4052 update_cfg_for_uncondjump (i3); | |
4053 } | |
4054 | |
4055 if (undobuf.other_insn != NULL_RTX | |
4056 && (returnjump_p (undobuf.other_insn) | |
4057 || any_uncondjump_p (undobuf.other_insn))) | |
4058 { | |
4059 *new_direct_jump_p = 1; | |
4060 update_cfg_for_uncondjump (undobuf.other_insn); | |
4061 } | |
4062 | |
4063 /* A noop might also need cleaning up of CFG, if it comes from the | |
4064 simplification of a jump. */ | |
4065 if (GET_CODE (newpat) == SET | |
4066 && SET_SRC (newpat) == pc_rtx | |
4067 && SET_DEST (newpat) == pc_rtx) | |
4068 { | |
4069 *new_direct_jump_p = 1; | |
4070 update_cfg_for_uncondjump (i3); | |
4071 } | |
4072 | |
3796 combine_successes++; | 4073 combine_successes++; |
3797 undo_commit (); | 4074 undo_commit (); |
3798 | 4075 |
3799 if (added_links_insn | 4076 if (added_links_insn |
3800 && (newi2pat == 0 || DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i2)) | 4077 && (newi2pat == 0 || DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i2)) |
3888 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it | 4165 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it |
3889 using LO_SUM and HIGH. */ | 4166 using LO_SUM and HIGH. */ |
3890 if (GET_CODE (XEXP (x, 0)) == CONST | 4167 if (GET_CODE (XEXP (x, 0)) == CONST |
3891 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF) | 4168 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF) |
3892 { | 4169 { |
4170 enum machine_mode address_mode | |
4171 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x)); | |
4172 | |
3893 SUBST (XEXP (x, 0), | 4173 SUBST (XEXP (x, 0), |
3894 gen_rtx_LO_SUM (Pmode, | 4174 gen_rtx_LO_SUM (address_mode, |
3895 gen_rtx_HIGH (Pmode, XEXP (x, 0)), | 4175 gen_rtx_HIGH (address_mode, XEXP (x, 0)), |
3896 XEXP (x, 0))); | 4176 XEXP (x, 0))); |
3897 return &XEXP (XEXP (x, 0), 0); | 4177 return &XEXP (XEXP (x, 0), 0); |
3898 } | 4178 } |
3899 #endif | 4179 #endif |
3900 | 4180 |
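The LO_SUM/HIGH split above targets machines where a full symbolic address does not fit in a single instruction; the change makes it honor the address space's own address mode instead of hard-coding Pmode. Schematically (illustrative RTL, not a dump):

```c
/*   (mem (symbol_ref "x"))
       ==> (mem (lo_sum (high (symbol_ref "x"))
                        (symbol_ref "x")))

   HIGH materializes the upper address bits (e.g. a RISC lui-style
   insn) and LO_SUM adds in the low bits, each now generated in
   targetm.addr_space.address_mode (MEM_ADDR_SPACE (x)).  */
```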
3902 address is not valid, perhaps we can split it up using | 4182 address is not valid, perhaps we can split it up using |
3903 the machine-specific way to split large constants. We use | 4183 the machine-specific way to split large constants. We use |
3904 the first pseudo-reg (one of the virtual regs) as a placeholder; | 4184 the first pseudo-reg (one of the virtual regs) as a placeholder; |
3905 it will not remain in the result. */ | 4185 it will not remain in the result. */ |
3906 if (GET_CODE (XEXP (x, 0)) == PLUS | 4186 if (GET_CODE (XEXP (x, 0)) == PLUS |
3907 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | 4187 && CONST_INT_P (XEXP (XEXP (x, 0), 1)) |
3908 && ! memory_address_p (GET_MODE (x), XEXP (x, 0))) | 4188 && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0), |
4189 MEM_ADDR_SPACE (x))) | |
3909 { | 4190 { |
3910 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER]; | 4191 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER]; |
3911 rtx seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg, | 4192 rtx seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg, |
3912 XEXP (x, 0)), | 4193 XEXP (x, 0)), |
3913 subst_insn); | 4194 subst_insn); |
3926 && ! reg_mentioned_p (reg, | 4207 && ! reg_mentioned_p (reg, |
3927 SET_SRC (PATTERN (seq))) | 4208 SET_SRC (PATTERN (seq))) |
3928 && NONJUMP_INSN_P (NEXT_INSN (seq)) | 4209 && NONJUMP_INSN_P (NEXT_INSN (seq)) |
3929 && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET | 4210 && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET |
3930 && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg | 4211 && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg |
3931 && memory_address_p (GET_MODE (x), | 4212 && memory_address_addr_space_p |
3932 SET_SRC (PATTERN (NEXT_INSN (seq))))) | 4213 (GET_MODE (x), SET_SRC (PATTERN (NEXT_INSN (seq))), |
4214 MEM_ADDR_SPACE (x))) | |
3933 { | 4215 { |
3934 rtx src1 = SET_SRC (PATTERN (seq)); | 4216 rtx src1 = SET_SRC (PATTERN (seq)); |
3935 rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq))); | 4217 rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq))); |
3936 | 4218 |
3937 /* Replace the placeholder in SRC2 with SRC1. If we can | 4219 /* Replace the placeholder in SRC2 with SRC1. If we can |
3966 } | 4248 } |
3967 | 4249 |
3968 /* If we have a PLUS whose first operand is complex, try computing it | 4250 /* If we have a PLUS whose first operand is complex, try computing it |
3969 separately by making a split there. */ | 4251 separately by making a split there. */ |
3970 if (GET_CODE (XEXP (x, 0)) == PLUS | 4252 if (GET_CODE (XEXP (x, 0)) == PLUS |
3971 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)) | 4253 && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0), |
4254 MEM_ADDR_SPACE (x)) | |
3972 && ! OBJECT_P (XEXP (XEXP (x, 0), 0)) | 4255 && ! OBJECT_P (XEXP (XEXP (x, 0), 0)) |
3973 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG | 4256 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG |
3974 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0))))) | 4257 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0))))) |
3975 return &XEXP (XEXP (x, 0), 0); | 4258 return &XEXP (XEXP (x, 0), 0); |
3976 break; | 4259 break; |
4004 /* See if this is a bitfield assignment with everything constant. If | 4287 /* See if this is a bitfield assignment with everything constant. If |
4005 so, this is an IOR of an AND, so split it into that. */ | 4288 so, this is an IOR of an AND, so split it into that. */ |
4006 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT | 4289 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT |
4007 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))) | 4290 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))) |
4008 <= HOST_BITS_PER_WIDE_INT) | 4291 <= HOST_BITS_PER_WIDE_INT) |
4009 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT | 4292 && CONST_INT_P (XEXP (SET_DEST (x), 1)) |
4010 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT | 4293 && CONST_INT_P (XEXP (SET_DEST (x), 2)) |
4011 && GET_CODE (SET_SRC (x)) == CONST_INT | 4294 && CONST_INT_P (SET_SRC (x)) |
4012 && ((INTVAL (XEXP (SET_DEST (x), 1)) | 4295 && ((INTVAL (XEXP (SET_DEST (x), 1)) |
4013 + INTVAL (XEXP (SET_DEST (x), 2))) | 4296 + INTVAL (XEXP (SET_DEST (x), 2))) |
4014 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))) | 4297 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))) |
4015 && ! side_effects_p (XEXP (SET_DEST (x), 0))) | 4298 && ! side_effects_p (XEXP (SET_DEST (x), 0))) |
4016 { | 4299 { |
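/* Worked instance of the IOR/AND form built here: with a 4-bit field
   at bit 8 and source constant 5,
     (set (zero_extract DEST 4 8) (const_int 5))
   becomes
     (set DEST (ior (and DEST (const_int -3841)) (const_int 1280)))
   since ~(15 << 8) == -3841 and (5 & 15) << 8 == 1280.  */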
4060 have taken more than one insn to make. If the constant were | 4343 have taken more than one insn to make. If the constant were |
4061 not a valid argument to the AND but took only one insn to make, | 4344 not a valid argument to the AND but took only one insn to make, |
4062 this is no worse, but if it took more than one insn, it will | 4345 this is no worse, but if it took more than one insn, it will |
4063 be better. */ | 4346 be better. */ |
4064 | 4347 |
4065 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT | 4348 if (CONST_INT_P (XEXP (SET_SRC (x), 1)) |
4066 && REG_P (XEXP (SET_SRC (x), 0)) | 4349 && REG_P (XEXP (SET_SRC (x), 0)) |
4067 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7 | 4350 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7 |
4068 && REG_P (SET_DEST (x)) | 4351 && REG_P (SET_DEST (x)) |
4069 && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0 | 4352 && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0 |
4070 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE) | 4353 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE) |
4120 unsignedp = 0; | 4403 unsignedp = 0; |
4121 break; | 4404 break; |
4122 | 4405 |
4123 case SIGN_EXTRACT: | 4406 case SIGN_EXTRACT: |
4124 case ZERO_EXTRACT: | 4407 case ZERO_EXTRACT: |
4125 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT | 4408 if (CONST_INT_P (XEXP (SET_SRC (x), 1)) |
4126 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT) | 4409 && CONST_INT_P (XEXP (SET_SRC (x), 2))) |
4127 { | 4410 { |
4128 inner = XEXP (SET_SRC (x), 0); | 4411 inner = XEXP (SET_SRC (x), 0); |
4129 len = INTVAL (XEXP (SET_SRC (x), 1)); | 4412 len = INTVAL (XEXP (SET_SRC (x), 1)); |
4130 pos = INTVAL (XEXP (SET_SRC (x), 2)); | 4413 pos = INTVAL (XEXP (SET_SRC (x), 2)); |
4131 | 4414 |
4497 | 4780 |
4498 if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx) | 4781 if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx) |
4499 return new_rtx; | 4782 return new_rtx; |
4500 | 4783 |
4501 if (GET_CODE (x) == SUBREG | 4784 if (GET_CODE (x) == SUBREG |
4502 && (GET_CODE (new_rtx) == CONST_INT | 4785 && (CONST_INT_P (new_rtx) |
4503 || GET_CODE (new_rtx) == CONST_DOUBLE)) | 4786 || GET_CODE (new_rtx) == CONST_DOUBLE)) |
4504 { | 4787 { |
4505 enum machine_mode mode = GET_MODE (x); | 4788 enum machine_mode mode = GET_MODE (x); |
4506 | 4789 |
4507 x = simplify_subreg (GET_MODE (x), new_rtx, | 4790 x = simplify_subreg (GET_MODE (x), new_rtx, |
4508 GET_MODE (SUBREG_REG (x)), | 4791 GET_MODE (SUBREG_REG (x)), |
4509 SUBREG_BYTE (x)); | 4792 SUBREG_BYTE (x)); |
4510 if (! x) | 4793 if (! x) |
4511 x = gen_rtx_CLOBBER (mode, const0_rtx); | 4794 x = gen_rtx_CLOBBER (mode, const0_rtx); |
4512 } | 4795 } |
4513 else if (GET_CODE (new_rtx) == CONST_INT | 4796 else if (CONST_INT_P (new_rtx) |
4514 && GET_CODE (x) == ZERO_EXTEND) | 4797 && GET_CODE (x) == ZERO_EXTEND) |
4515 { | 4798 { |
4516 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), | 4799 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), |
4517 new_rtx, GET_MODE (XEXP (x, 0))); | 4800 new_rtx, GET_MODE (XEXP (x, 0))); |
4518 gcc_assert (x); | 4801 gcc_assert (x); |
4647 x = simplify_gen_relational (reversed, mode, VOIDmode, | 4930 x = simplify_gen_relational (reversed, mode, VOIDmode, |
4648 cond, cop1); | 4931 cond, cop1); |
4649 | 4932 |
4650 /* Likewise, we can make the negate of a comparison operation | 4933 /* Likewise, we can make the negate of a comparison operation |
4651 if the result values are - STORE_FLAG_VALUE and zero. */ | 4934 if the result values are - STORE_FLAG_VALUE and zero. */ |
4652 else if (GET_CODE (true_rtx) == CONST_INT | 4935 else if (CONST_INT_P (true_rtx) |
4653 && INTVAL (true_rtx) == - STORE_FLAG_VALUE | 4936 && INTVAL (true_rtx) == - STORE_FLAG_VALUE |
4654 && false_rtx == const0_rtx) | 4937 && false_rtx == const0_rtx) |
4655 x = simplify_gen_unary (NEG, mode, | 4938 x = simplify_gen_unary (NEG, mode, |
4656 simplify_gen_relational (cond_code, | 4939 simplify_gen_relational (cond_code, |
4657 mode, VOIDmode, | 4940 mode, VOIDmode, |
4658 cond, cop1), | 4941 cond, cop1), |
4659 mode); | 4942 mode); |
4660 else if (GET_CODE (false_rtx) == CONST_INT | 4943 else if (CONST_INT_P (false_rtx) |
4661 && INTVAL (false_rtx) == - STORE_FLAG_VALUE | 4944 && INTVAL (false_rtx) == - STORE_FLAG_VALUE |
4662 && true_rtx == const0_rtx | 4945 && true_rtx == const0_rtx |
4663 && ((reversed = reversed_comparison_code_parts | 4946 && ((reversed = reversed_comparison_code_parts |
4664 (cond_code, cond, cop1, NULL)) | 4947 (cond_code, cond, cop1, NULL)) |
4665 != UNKNOWN)) | 4948 != UNKNOWN)) |
4834 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be | 5117 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be |
4835 replaced by (lshiftrt X C). This will convert | 5118 replaced by (lshiftrt X C). This will convert |
4836 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */ | 5119 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */ |
4837 | 5120 |
4838 if (GET_CODE (temp) == ASHIFTRT | 5121 if (GET_CODE (temp) == ASHIFTRT |
4839 && GET_CODE (XEXP (temp, 1)) == CONST_INT | 5122 && CONST_INT_P (XEXP (temp, 1)) |
4840 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1) | 5123 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1) |
4841 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0), | 5124 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0), |
4842 INTVAL (XEXP (temp, 1))); | 5125 INTVAL (XEXP (temp, 1))); |
4843 | 5126 |
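/* Worked instance: for SImode, (ashiftrt X 31) is 0 or -1 depending on
   the sign bit, so its negation is 0 or 1 -- the same values that
   (lshiftrt X 31) produces by moving the sign bit into bit 0.  */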
4844 /* If X has only a single bit that might be nonzero, say, bit I, convert | 5127 /* If X has only a single bit that might be nonzero, say, bit I, convert |
4874 because we don't know the real bitsize of the partial | 5157 because we don't know the real bitsize of the partial |
4875 integer mode. */ | 5158 integer mode. */ |
4876 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT) | 5159 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT) |
4877 break; | 5160 break; |
4878 | 5161 |
4879 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | 5162 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) |
4880 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), | |
4881 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))) | |
4882 SUBST (XEXP (x, 0), | 5163 SUBST (XEXP (x, 0), |
4883 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)), | 5164 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)), |
4884 GET_MODE_MASK (mode), 0)); | 5165 GET_MODE_MASK (mode), 0)); |
5166 | |
5167 /* We can truncate a constant value and return it. */ | |
5168 if (CONST_INT_P (XEXP (x, 0))) | |
5169 return gen_int_mode (INTVAL (XEXP (x, 0)), mode); | |
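/* E.g. truncating (const_int 0x1234) to QImode gives (const_int 0x34);
   gen_int_mode also re-canonicalizes the sign, so 0xb4 would come back
   as (const_int -76).  */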
4885 | 5170 |
4886 /* Similarly to what we do in simplify-rtx.c, a truncate of a register | 5171 /* Similarly to what we do in simplify-rtx.c, a truncate of a register |
4887 whose value is a comparison can be replaced with a subreg if | 5172 whose value is a comparison can be replaced with a subreg if |
4888 STORE_FLAG_VALUE permits. */ | 5173 STORE_FLAG_VALUE permits. */ |
4889 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | 5174 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
4890 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0 | 5175 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0 |
4891 && (temp = get_last_value (XEXP (x, 0))) | 5176 && (temp = get_last_value (XEXP (x, 0))) |
4892 && COMPARISON_P (temp)) | 5177 && COMPARISON_P (temp)) |
4893 return gen_lowpart (mode, XEXP (x, 0)); | 5178 return gen_lowpart (mode, XEXP (x, 0)); |
4894 break; | 5179 break; |
4895 | |
4896 #ifdef HAVE_cc0 | |
4897 case COMPARE: | |
4898 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't | |
4899 using cc0, in which case we want to leave it as a COMPARE | |
4900 so we can distinguish it from a register-register-copy. */ | |
4901 if (XEXP (x, 1) == const0_rtx) | |
4902 return XEXP (x, 0); | |
4903 | |
4904 /* x - 0 is the same as x unless x's mode has signed zeros and | |
4905 allows rounding towards -infinity. Under those conditions, | |
4906 0 - 0 is -0. */ | |
4907 if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0))) | |
4908 && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0)))) | |
4909 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0)))) | |
4910 return XEXP (x, 0); | |
4911 break; | |
4912 #endif | |
4913 | 5180 |
4914 case CONST: | 5181 case CONST: |
4915 /* (const (const X)) can become (const X). Do it this way rather than | 5182 /* (const (const X)) can become (const X). Do it this way rather than |
4916 returning the inner CONST since CONST can be shared with a | 5183 returning the inner CONST since CONST can be shared with a |
4917 REG_EQUAL note. */ | 5184 REG_EQUAL note. */ |
4935 when c is (const_int (pow2 + 1) / 2) is a sign extension of a | 5202 when c is (const_int (pow2 + 1) / 2) is a sign extension of a |
4936 bit-field and can be replaced by either a sign_extend or a | 5203 bit-field and can be replaced by either a sign_extend or a |
4937 sign_extract. The `and' may be a zero_extend and the two | 5204 sign_extract. The `and' may be a zero_extend and the two |
4938 <c>, -<c> constants may be reversed. */ | 5205 <c>, -<c> constants may be reversed. */ |
4939 if (GET_CODE (XEXP (x, 0)) == XOR | 5206 if (GET_CODE (XEXP (x, 0)) == XOR |
4940 && GET_CODE (XEXP (x, 1)) == CONST_INT | 5207 && CONST_INT_P (XEXP (x, 1)) |
4941 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | 5208 && CONST_INT_P (XEXP (XEXP (x, 0), 1)) |
4942 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1)) | 5209 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1)) |
4943 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0 | 5210 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0 |
4944 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0) | 5211 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0) |
4945 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | 5212 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
4946 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND | 5213 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND |
4947 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT | 5214 && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1)) |
4948 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) | 5215 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) |
4949 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1)) | 5216 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1)) |
4950 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND | 5217 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND |
4951 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0))) | 5218 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0))) |
4952 == (unsigned int) i + 1)))) | 5219 == (unsigned int) i + 1)))) |
4995 | 5262 |
4996 case MINUS: | 5263 case MINUS: |
4997 /* (minus <foo> (and <foo> (const_int -pow2))) becomes | 5264 /* (minus <foo> (and <foo> (const_int -pow2))) becomes |
4998 (and <foo> (const_int pow2-1)) */ | 5265 (and <foo> (const_int pow2-1)) */ |
4999 if (GET_CODE (XEXP (x, 1)) == AND | 5266 if (GET_CODE (XEXP (x, 1)) == AND |
5000 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT | 5267 && CONST_INT_P (XEXP (XEXP (x, 1), 1)) |
5001 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0 | 5268 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0 |
5002 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0))) | 5269 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0))) |
5003 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0), | 5270 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0), |
5004 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1); | 5271 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1); |
5005 break; | 5272 break; |
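/* Worked instance with pow2 == 8: (and X (const_int -8)) is X rounded
   down to a multiple of 8, so subtracting it from X leaves the low
   three bits, i.e. (and X (const_int 7)).  */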
5015 if (result) | 5282 if (result) |
5016 return result; | 5283 return result; |
5017 } | 5284 } |
5018 | 5285 |
5019 /* Try to simplify a*(b/c) as (a*b)/c. */ | 5286 /* Try to simplify a*(b/c) as (a*b)/c. */ |
5020 if (FLOAT_MODE_P (mode) && flag_associative_math | 5287 if (FLOAT_MODE_P (mode) && flag_associative_math |
5021 && GET_CODE (XEXP (x, 0)) == DIV) | 5288 && GET_CODE (XEXP (x, 0)) == DIV) |
5022 { | 5289 { |
5023 rtx tem = simplify_binary_operation (MULT, mode, | 5290 rtx tem = simplify_binary_operation (MULT, mode, |
5024 XEXP (XEXP (x, 0), 0), | 5291 XEXP (XEXP (x, 0), 0), |
5025 XEXP (x, 1)); | 5292 XEXP (x, 1)); |
5029 break; | 5296 break; |
5030 | 5297 |
5031 case UDIV: | 5298 case UDIV: |
5032 /* If this is a divide by a power of two, treat it as a shift if | 5299 /* If this is a divide by a power of two, treat it as a shift if |
5033 its first operand is a shift. */ | 5300 its first operand is a shift. */ |
5034 if (GET_CODE (XEXP (x, 1)) == CONST_INT | 5301 if (CONST_INT_P (XEXP (x, 1)) |
5035 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0 | 5302 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0 |
5036 && (GET_CODE (XEXP (x, 0)) == ASHIFT | 5303 && (GET_CODE (XEXP (x, 0)) == ASHIFT |
5037 || GET_CODE (XEXP (x, 0)) == LSHIFTRT | 5304 || GET_CODE (XEXP (x, 0)) == LSHIFTRT |
5038 || GET_CODE (XEXP (x, 0)) == ASHIFTRT | 5305 || GET_CODE (XEXP (x, 0)) == ASHIFTRT |
5039 || GET_CODE (XEXP (x, 0)) == ROTATE | 5306 || GET_CODE (XEXP (x, 0)) == ROTATE |
5223 case LSHIFTRT: | 5490 case LSHIFTRT: |
5224 case ASHIFTRT: | 5491 case ASHIFTRT: |
5225 case ROTATE: | 5492 case ROTATE: |
5226 case ROTATERT: | 5493 case ROTATERT: |
5227 /* If this is a shift by a constant amount, simplify it. */ | 5494 /* If this is a shift by a constant amount, simplify it. */ |
5228 if (GET_CODE (XEXP (x, 1)) == CONST_INT) | 5495 if (CONST_INT_P (XEXP (x, 1))) |
5229 return simplify_shift_const (x, code, mode, XEXP (x, 0), | 5496 return simplify_shift_const (x, code, mode, XEXP (x, 0), |
5230 INTVAL (XEXP (x, 1))); | 5497 INTVAL (XEXP (x, 1))); |
5231 | 5498 |
5232 else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1))) | 5499 else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1))) |
5233 SUBST (XEXP (x, 1), | 5500 SUBST (XEXP (x, 1), |
5342 | 5609 |
5343 if (comparison_p | 5610 if (comparison_p |
5344 && reversed_comparison_code (cond, NULL) != UNKNOWN | 5611 && reversed_comparison_code (cond, NULL) != UNKNOWN |
5345 && (true_rtx == pc_rtx | 5612 && (true_rtx == pc_rtx |
5346 || (CONSTANT_P (true_rtx) | 5613 || (CONSTANT_P (true_rtx) |
5347 && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx) | 5614 && !CONST_INT_P (false_rtx) && false_rtx != pc_rtx) |
5348 || true_rtx == const0_rtx | 5615 || true_rtx == const0_rtx |
5349 || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx)) | 5616 || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx)) |
5350 || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx)) | 5617 || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx)) |
5351 && !OBJECT_P (false_rtx)) | 5618 && !OBJECT_P (false_rtx)) |
5352 || reg_mentioned_p (true_rtx, false_rtx) | 5619 || reg_mentioned_p (true_rtx, false_rtx) |
5559 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the | 5826 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the |
5560 negation of a single bit, we can convert this operation to a shift. We | 5827 negation of a single bit, we can convert this operation to a shift. We |
5561 can actually do this more generally, but it doesn't seem worth it. */ | 5828 can actually do this more generally, but it doesn't seem worth it. */ |
5562 | 5829 |
5563 if (true_code == NE && XEXP (cond, 1) == const0_rtx | 5830 if (true_code == NE && XEXP (cond, 1) == const0_rtx |
5564 && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT | 5831 && false_rtx == const0_rtx && CONST_INT_P (true_rtx) |
5565 && ((1 == nonzero_bits (XEXP (cond, 0), mode) | 5832 && ((1 == nonzero_bits (XEXP (cond, 0), mode) |
5566 && (i = exact_log2 (INTVAL (true_rtx))) >= 0) | 5833 && (i = exact_log2 (INTVAL (true_rtx))) >= 0) |
5567 || ((num_sign_bit_copies (XEXP (cond, 0), mode) | 5834 || ((num_sign_bit_copies (XEXP (cond, 0), mode) |
5568 == GET_MODE_BITSIZE (mode)) | 5835 == GET_MODE_BITSIZE (mode)) |
5569 && (i = exact_log2 (-INTVAL (true_rtx))) >= 0))) | 5836 && (i = exact_log2 (-INTVAL (true_rtx))) >= 0))) |
5570 return | 5837 return |
5571 simplify_shift_const (NULL_RTX, ASHIFT, mode, | 5838 simplify_shift_const (NULL_RTX, ASHIFT, mode, |
5572 gen_lowpart (mode, XEXP (cond, 0)), i); | 5839 gen_lowpart (mode, XEXP (cond, 0)), i); |
5573 | 5840 |
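/* Sketch: when nonzero_bits proves A is 0 or 1,
   (if_then_else (ne A 0) (const_int 8) (const_int 0)) is just A scaled
   by 8, which the call above materializes as (ashift A 3).  */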
5574 /* (IF_THEN_ELSE (NE REG 0) (0) (8)) is REG for nonzero_bits (REG) == 8. */ | 5841 /* (IF_THEN_ELSE (NE REG 0) (0) (8)) is REG for nonzero_bits (REG) == 8. */ |
5575 if (true_code == NE && XEXP (cond, 1) == const0_rtx | 5842 if (true_code == NE && XEXP (cond, 1) == const0_rtx |
5576 && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT | 5843 && false_rtx == const0_rtx && CONST_INT_P (true_rtx) |
5577 && GET_MODE (XEXP (cond, 0)) == mode | 5844 && GET_MODE (XEXP (cond, 0)) == mode |
5578 && (INTVAL (true_rtx) & GET_MODE_MASK (mode)) | 5845 && (INTVAL (true_rtx) & GET_MODE_MASK (mode)) |
5579 == nonzero_bits (XEXP (cond, 0), mode) | 5846 == nonzero_bits (XEXP (cond, 0), mode) |
5580 && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0) | 5847 && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0) |
5581 return XEXP (cond, 0); | 5848 return XEXP (cond, 0); |
5742 } | 6009 } |
5743 } | 6010 } |
5744 | 6011 |
5745 if (other_changed) | 6012 if (other_changed) |
5746 undobuf.other_insn = other_insn; | 6013 undobuf.other_insn = other_insn; |
5747 | |
5748 #ifdef HAVE_cc0 | |
5749 /* If we are now comparing against zero, change our source if | |
5750 needed. If we do not use cc0, we always have a COMPARE. */ | |
5751 if (op1 == const0_rtx && dest == cc0_rtx) | |
5752 { | |
5753 SUBST (SET_SRC (x), op0); | |
5754 src = op0; | |
5755 } | |
5756 else | |
5757 #endif | |
5758 | 6014 |
5759 /* Otherwise, if we didn't previously have a COMPARE in the | 6015 /* Otherwise, if we didn't previously have a COMPARE in the |
5760 correct mode, we need one. */ | 6016 correct mode, we need one. */ |
5761 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode) | 6017 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode) |
5762 { | 6018 { |
5949 { | 6205 { |
5950 case AND: | 6206 case AND: |
5951 /* We can call simplify_and_const_int only if we don't lose | 6207 /* We can call simplify_and_const_int only if we don't lose |
5952 any (sign) bits when converting INTVAL (op1) to | 6208 any (sign) bits when converting INTVAL (op1) to |
5953 "unsigned HOST_WIDE_INT". */ | 6209 "unsigned HOST_WIDE_INT". */ |
5954 if (GET_CODE (op1) == CONST_INT | 6210 if (CONST_INT_P (op1) |
5955 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | 6211 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
5956 || INTVAL (op1) > 0)) | 6212 || INTVAL (op1) > 0)) |
5957 { | 6213 { |
5958 x = simplify_and_const_int (x, mode, op0, INTVAL (op1)); | 6214 x = simplify_and_const_int (x, mode, op0, INTVAL (op1)); |
5959 if (GET_CODE (x) != AND) | 6215 if (GET_CODE (x) != AND) |
6038 we can't tell whether the implicit way is right. | 6294 we can't tell whether the implicit way is right. |
6039 | 6295 |
6040 Even for a mode that is no wider than a const_int, | 6296 Even for a mode that is no wider than a const_int, |
6041 we can't win, because we need to sign extend one of its bits through | 6297 we can't win, because we need to sign extend one of its bits through |
6042 the rest of it, and we don't know which bit. */ | 6298 the rest of it, and we don't know which bit. */ |
6043 if (GET_CODE (XEXP (x, 0)) == CONST_INT) | 6299 if (CONST_INT_P (XEXP (x, 0))) |
6044 return x; | 6300 return x; |
6045 | 6301 |
6046 /* Return if (subreg:MODE FROM 0) is not a safe replacement for | 6302 /* Return if (subreg:MODE FROM 0) is not a safe replacement for |
6047 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM | 6303 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM |
6048 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be | 6304 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be |
6078 case SIGN_EXTRACT: | 6334 case SIGN_EXTRACT: |
6079 /* If the operand is a CLOBBER, just return it. */ | 6335 /* If the operand is a CLOBBER, just return it. */ |
6080 if (GET_CODE (XEXP (x, 0)) == CLOBBER) | 6336 if (GET_CODE (XEXP (x, 0)) == CLOBBER) |
6081 return XEXP (x, 0); | 6337 return XEXP (x, 0); |
6082 | 6338 |
6083 if (GET_CODE (XEXP (x, 1)) != CONST_INT | 6339 if (!CONST_INT_P (XEXP (x, 1)) |
6084 || GET_CODE (XEXP (x, 2)) != CONST_INT | 6340 || !CONST_INT_P (XEXP (x, 2)) |
6085 || GET_MODE (XEXP (x, 0)) == VOIDmode) | 6341 || GET_MODE (XEXP (x, 0)) == VOIDmode) |
6086 return x; | 6342 return x; |
6087 | 6343 |
6088 /* Reject MODEs that aren't scalar integers because turning vector | 6344 /* Reject MODEs that aren't scalar integers because turning vector |
6089 or complex modes into shifts causes problems. */ | 6345 or complex modes into shifts causes problems. */ |
6250 inner = SUBREG_REG (XEXP (SET_DEST (x), 0)); | 6506 inner = SUBREG_REG (XEXP (SET_DEST (x), 0)); |
6251 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))); | 6507 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))); |
6252 pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0))); | 6508 pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0))); |
6253 } | 6509 } |
6254 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT | 6510 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT |
6255 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT) | 6511 && CONST_INT_P (XEXP (SET_DEST (x), 1))) |
6256 { | 6512 { |
6257 inner = XEXP (SET_DEST (x), 0); | 6513 inner = XEXP (SET_DEST (x), 0); |
6258 len = INTVAL (XEXP (SET_DEST (x), 1)); | 6514 len = INTVAL (XEXP (SET_DEST (x), 1)); |
6259 pos = XEXP (SET_DEST (x), 2); | 6515 pos = XEXP (SET_DEST (x), 2); |
6260 | 6516 |
6261 /* A constant position should stay within the width of INNER. */ | 6517 /* A constant position should stay within the width of INNER. */ |
6262 if (GET_CODE (pos) == CONST_INT | 6518 if (CONST_INT_P (pos) |
6263 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner))) | 6519 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner))) |
6264 break; | 6520 break; |
6265 | 6521 |
6266 if (BITS_BIG_ENDIAN) | 6522 if (BITS_BIG_ENDIAN) |
6267 { | 6523 { |
6268 if (GET_CODE (pos) == CONST_INT) | 6524 if (CONST_INT_P (pos)) |
6269 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len | 6525 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len |
6270 - INTVAL (pos)); | 6526 - INTVAL (pos)); |
6271 else if (GET_CODE (pos) == MINUS | 6527 else if (GET_CODE (pos) == MINUS |
6272 && GET_CODE (XEXP (pos, 1)) == CONST_INT | 6528 && CONST_INT_P (XEXP (pos, 1)) |
6273 && (INTVAL (XEXP (pos, 1)) | 6529 && (INTVAL (XEXP (pos, 1)) |
6274 == GET_MODE_BITSIZE (GET_MODE (inner)) - len)) | 6530 == GET_MODE_BITSIZE (GET_MODE (inner)) - len)) |
6275 /* If position is ADJUST - X, new position is X. */ | 6531 /* If position is ADJUST - X, new position is X. */ |
6276 pos = XEXP (pos, 0); | 6532 pos = XEXP (pos, 0); |
6277 else | 6533 else |
6404 if (MEM_P (SUBREG_REG (inner))) | 6660 if (MEM_P (SUBREG_REG (inner))) |
6405 is_mode = GET_MODE (SUBREG_REG (inner)); | 6661 is_mode = GET_MODE (SUBREG_REG (inner)); |
6406 inner = SUBREG_REG (inner); | 6662 inner = SUBREG_REG (inner); |
6407 } | 6663 } |
6408 else if (GET_CODE (inner) == ASHIFT | 6664 else if (GET_CODE (inner) == ASHIFT |
6409 && GET_CODE (XEXP (inner, 1)) == CONST_INT | 6665 && CONST_INT_P (XEXP (inner, 1)) |
6410 && pos_rtx == 0 && pos == 0 | 6666 && pos_rtx == 0 && pos == 0 |
6411 && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1))) | 6667 && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1))) |
6412 { | 6668 { |
6413 /* We're extracting the least significant bits of an rtx | 6669 /* We're extracting the least significant bits of an rtx |
6414 (ashift X (const_int C)), where LEN > C. Extract the | 6670 (ashift X (const_int C)), where LEN > C. Extract the |
6421 return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1)); | 6677 return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1)); |
6422 } | 6678 } |
6423 | 6679 |
6424 inner_mode = GET_MODE (inner); | 6680 inner_mode = GET_MODE (inner); |
6425 | 6681 |
6426 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT) | 6682 if (pos_rtx && CONST_INT_P (pos_rtx)) |
6427 pos = INTVAL (pos_rtx), pos_rtx = 0; | 6683 pos = INTVAL (pos_rtx), pos_rtx = 0; |
6428 | 6684 |
6429 /* See if this can be done without an extraction. We never can if the | 6685 /* See if this can be done without an extraction. We never can if the |
6430 width of the field is not the same as that of some integer mode. For | 6686 width of the field is not the same as that of some integer mode. For |
6431 registers, we can only avoid the extraction if the position is at the | 6687 registers, we can only avoid the extraction if the position is at the |
6529 : gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx))); | 6785 : gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx))); |
6530 | 6786 |
6531 if (mode == tmode) | 6787 if (mode == tmode) |
6532 return new_rtx; | 6788 return new_rtx; |
6533 | 6789 |
6534 if (GET_CODE (new_rtx) == CONST_INT) | 6790 if (CONST_INT_P (new_rtx)) |
6535 return gen_int_mode (INTVAL (new_rtx), mode); | 6791 return gen_int_mode (INTVAL (new_rtx), mode); |
6536 | 6792 |
6537 /* If we know that no extraneous bits are set, and that the high | 6793 /* If we know that no extraneous bits are set, and that the high |
6538 bit is not set, convert the extraction to the cheaper of | 6794 bit is not set, convert the extraction to the cheaper of |
6539 sign and zero extension, that are equivalent in these cases. */ | 6795 sign and zero extension, that are equivalent in these cases. */ |
6698 - GET_MODE_SIZE (wanted_inner_mode) - offset); | 6954 - GET_MODE_SIZE (wanted_inner_mode) - offset); |
6699 | 6955 |
6700 inner = adjust_address_nv (inner, wanted_inner_mode, offset); | 6956 inner = adjust_address_nv (inner, wanted_inner_mode, offset); |
6701 } | 6957 } |
6702 | 6958 |
6703 /* If INNER is not memory, we can always get it into the proper mode. If we | 6959 /* If INNER is not memory, get it into the proper mode. If we are changing |
6704 are changing its mode, POS must be a constant and smaller than the size | 6960 its mode, POS must be a constant and smaller than the size of the new |
6705 of the new mode. */ | 6961 mode. */ |
6706 else if (!MEM_P (inner)) | 6962 else if (!MEM_P (inner)) |
6707 { | 6963 { |
6964 /* On the LHS, don't create paradoxical subregs implicitly truncating | |
6965 the register unless TRULY_NOOP_TRUNCATION. */ | |
6966 if (in_dest | |
6967 && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (inner)), | |
6968 GET_MODE_BITSIZE (wanted_inner_mode))) | |
6969 return NULL_RTX; | |
6970 | |
6708 if (GET_MODE (inner) != wanted_inner_mode | 6971 if (GET_MODE (inner) != wanted_inner_mode |
6709 && (pos_rtx != 0 | 6972 && (pos_rtx != 0 |
6710 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode))) | 6973 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode))) |
6711 return 0; | 6974 return NULL_RTX; |
6712 | 6975 |
6713 if (orig_pos < 0) | 6976 if (orig_pos < 0) |
6714 return 0; | 6977 return NULL_RTX; |
6715 | 6978 |
6716 inner = force_to_mode (inner, wanted_inner_mode, | 6979 inner = force_to_mode (inner, wanted_inner_mode, |
6717 pos_rtx | 6980 pos_rtx |
6718 || len + orig_pos >= HOST_BITS_PER_WIDE_INT | 6981 || len + orig_pos >= HOST_BITS_PER_WIDE_INT |
6719 ? ~(unsigned HOST_WIDE_INT) 0 | 6982 ? ~(unsigned HOST_WIDE_INT) 0 |
6787 { | 7050 { |
6788 case ASHIFT: | 7051 case ASHIFT: |
6789 /* This is the shift itself. If it is wide enough, we will return | 7052 /* This is the shift itself. If it is wide enough, we will return |
6790 either the value being shifted if the shift count is equal to | 7053 either the value being shifted if the shift count is equal to |
6791 COUNT or a shift for the difference. */ | 7054 COUNT or a shift for the difference. */ |
6792 if (GET_CODE (XEXP (x, 1)) == CONST_INT | 7055 if (CONST_INT_P (XEXP (x, 1)) |
6793 && INTVAL (XEXP (x, 1)) >= count) | 7056 && INTVAL (XEXP (x, 1)) >= count) |
6794 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), | 7057 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), |
6795 INTVAL (XEXP (x, 1)) - count); | 7058 INTVAL (XEXP (x, 1)) - count); |
6796 break; | 7059 break; |
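/* Example of the two outcomes for COUNT == 3: (ashift X 5) yields
   (ashift X 2), while (ashift X 3) yields X itself, the shift by
   zero being simplified away.  */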
6797 | 7060 |
6802 break; | 7065 break; |
6803 | 7066 |
6804 case PLUS: case IOR: case XOR: case AND: | 7067 case PLUS: case IOR: case XOR: case AND: |
6805 /* If we can safely shift this constant and we find the inner shift, | 7068 /* If we can safely shift this constant and we find the inner shift, |
6806 make a new operation. */ | 7069 make a new operation. */ |
6807 if (GET_CODE (XEXP (x, 1)) == CONST_INT | 7070 if (CONST_INT_P (XEXP (x, 1)) |
6808 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0 | 7071 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0 |
6809 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0) | 7072 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0) |
6810 return simplify_gen_binary (code, mode, tem, | 7073 return simplify_gen_binary (code, mode, tem, |
6811 GEN_INT (INTVAL (XEXP (x, 1)) >> count)); | 7074 GEN_INT (INTVAL (XEXP (x, 1)) >> count)); |
6812 | 7075 |
6865 switch (code) | 7128 switch (code) |
6866 { | 7129 { |
6867 case ASHIFT: | 7130 case ASHIFT: |
6868 /* Convert shifts by constants into multiplications if inside | 7131 /* Convert shifts by constants into multiplications if inside |
6869 an address. */ | 7132 an address. */ |
6870 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT | 7133 if (in_code == MEM && CONST_INT_P (XEXP (x, 1)) |
6871 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT | 7134 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
6872 && INTVAL (XEXP (x, 1)) >= 0) | 7135 && INTVAL (XEXP (x, 1)) >= 0) |
6873 { | 7136 { |
6874 new_rtx = make_compound_operation (XEXP (x, 0), next_code); | 7137 new_rtx = make_compound_operation (XEXP (x, 0), next_code); |
6875 new_rtx = gen_rtx_MULT (mode, new_rtx, | 7138 new_rtx = gen_rtx_MULT (mode, new_rtx, |
6879 break; | 7142 break; |
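/* E.g. inside an address, (ashift X (const_int 2)) is rewritten as
   (mult X (const_int 4)), the canonical form for a scaled index in
   memory addresses.  */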
6880 | 7143 |
6881 case AND: | 7144 case AND: |
6882 /* If the second operand is not a constant, we can't do anything | 7145 /* If the second operand is not a constant, we can't do anything |
6883 with it. */ | 7146 with it. */ |
6884 if (GET_CODE (XEXP (x, 1)) != CONST_INT) | 7147 if (!CONST_INT_P (XEXP (x, 1))) |
6885 break; | 7148 break; |
6886 | 7149 |
6887 /* If the constant is a power of two minus one and the first operand | 7150 /* If the constant is a power of two minus one and the first operand |
6888 is a logical right shift, make an extraction. */ | 7151 is a logical right shift, make an extraction. */ |
6889 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | 7152 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT |
6924 | 7187 |
6925 /* If we have (and (rotate X C) M) and C is larger than the number | 7188 /* If we have (and (rotate X C) M) and C is larger than the number |
6926 of bits in M, this is an extraction. */ | 7189 of bits in M, this is an extraction. */ |
6927 | 7190 |
6928 else if (GET_CODE (XEXP (x, 0)) == ROTATE | 7191 else if (GET_CODE (XEXP (x, 0)) == ROTATE |
6929 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | 7192 && CONST_INT_P (XEXP (XEXP (x, 0), 1)) |
6930 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0 | 7193 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0 |
6931 && i <= INTVAL (XEXP (XEXP (x, 0), 1))) | 7194 && i <= INTVAL (XEXP (XEXP (x, 0), 1))) |
6932 { | 7195 { |
6933 new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); | 7196 new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); |
6934 new_rtx = make_extraction (mode, new_rtx, | 7197 new_rtx = make_extraction (mode, new_rtx, |
6941 a logical shift and our mask turns off all the propagated sign | 7204 a logical shift and our mask turns off all the propagated sign |
6942 bits, we can replace the logical shift with an arithmetic shift. */ | 7205 bits, we can replace the logical shift with an arithmetic shift. */ |
6943 else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | 7206 else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT |
6944 && !have_insn_for (LSHIFTRT, mode) | 7207 && !have_insn_for (LSHIFTRT, mode) |
6945 && have_insn_for (ASHIFTRT, mode) | 7208 && have_insn_for (ASHIFTRT, mode) |
6946 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | 7209 && CONST_INT_P (XEXP (XEXP (x, 0), 1)) |
6947 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | 7210 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 |
6948 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT | 7211 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT |
6949 && mode_width <= HOST_BITS_PER_WIDE_INT) | 7212 && mode_width <= HOST_BITS_PER_WIDE_INT) |
6950 { | 7213 { |
6951 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); | 7214 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
7001 lhs = XEXP (x, 0); | 7264 lhs = XEXP (x, 0); |
7002 rhs = XEXP (x, 1); | 7265 rhs = XEXP (x, 1); |
7003 | 7266 |
7004 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1, | 7267 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1, |
7005 this is a SIGN_EXTRACT. */ | 7268 this is a SIGN_EXTRACT. */ |
7006 if (GET_CODE (rhs) == CONST_INT | 7269 if (CONST_INT_P (rhs) |
7007 && GET_CODE (lhs) == ASHIFT | 7270 && GET_CODE (lhs) == ASHIFT |
7008 && GET_CODE (XEXP (lhs, 1)) == CONST_INT | 7271 && CONST_INT_P (XEXP (lhs, 1)) |
7009 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)) | 7272 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)) |
7010 && INTVAL (rhs) < mode_width) | 7273 && INTVAL (rhs) < mode_width) |
7011 { | 7274 { |
7012 new_rtx = make_compound_operation (XEXP (lhs, 0), next_code); | 7275 new_rtx = make_compound_operation (XEXP (lhs, 0), next_code); |
7013 new_rtx = make_extraction (mode, new_rtx, | 7276 new_rtx = make_extraction (mode, new_rtx, |
7023 seem worth the effort; the case checked for occurs on Alpha. */ | 7286 seem worth the effort; the case checked for occurs on Alpha. */ |
7024 | 7287 |
7025 if (!OBJECT_P (lhs) | 7288 if (!OBJECT_P (lhs) |
7026 && ! (GET_CODE (lhs) == SUBREG | 7289 && ! (GET_CODE (lhs) == SUBREG |
7027 && (OBJECT_P (SUBREG_REG (lhs)))) | 7290 && (OBJECT_P (SUBREG_REG (lhs)))) |
7028 && GET_CODE (rhs) == CONST_INT | 7291 && CONST_INT_P (rhs) |
7029 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT | 7292 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT |
7030 && INTVAL (rhs) < mode_width | 7293 && INTVAL (rhs) < mode_width |
7031 && (new_rtx = extract_left_shift (lhs, INTVAL (rhs))) != 0) | 7294 && (new_rtx = extract_left_shift (lhs, INTVAL (rhs))) != 0) |
7032 new_rtx = make_extraction (mode, make_compound_operation (new_rtx, next_code), | 7295 new_rtx = make_extraction (mode, make_compound_operation (new_rtx, next_code), |
7033 0, NULL_RTX, mode_width - INTVAL (rhs), | 7296 0, NULL_RTX, mode_width - INTVAL (rhs), |
7058 | 7321 |
7059 /* If we have something other than a SUBREG, we might have | 7322 /* If we have something other than a SUBREG, we might have |
7060 done an expansion, so rerun ourselves. */ | 7323 done an expansion, so rerun ourselves. */ |
7061 if (GET_CODE (newer) != SUBREG) | 7324 if (GET_CODE (newer) != SUBREG) |
7062 newer = make_compound_operation (newer, in_code); | 7325 newer = make_compound_operation (newer, in_code); |
7326 | |
7327 /* force_to_mode can expand compounds. If it just re-expanded the | |
7328 compound, use gen_lowpart instead to convert to the desired | |
7329 mode. */ | |
7330 if (rtx_equal_p (newer, x)) | |
7331 return gen_lowpart (GET_MODE (x), tem); | |
7063 | 7332 |
7064 return newer; | 7333 return newer; |
7065 } | 7334 } |
7066 | 7335 |
7067 if (simplified) | 7336 if (simplified) |
7237 would need an explicit truncation. */ | 7506 would need an explicit truncation. */ |
7238 | 7507 |
7239 static rtx | 7508 static rtx |
7240 gen_lowpart_or_truncate (enum machine_mode mode, rtx x) | 7509 gen_lowpart_or_truncate (enum machine_mode mode, rtx x) |
7241 { | 7510 { |
7242 if (GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (mode) | 7511 if (!CONST_INT_P (x) |
7243 || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), | 7512 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)) |
7244 GET_MODE_BITSIZE (GET_MODE (x))) | 7513 && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), |
7245 || (REG_P (x) && reg_truncated_to_mode (mode, x))) | 7514 GET_MODE_BITSIZE (GET_MODE (x))) |
7246 return gen_lowpart (mode, x); | 7515 && !(REG_P (x) && reg_truncated_to_mode (mode, x))) |
7247 else | 7516 { |
7248 return simplify_gen_unary (TRUNCATE, mode, x, GET_MODE (x)); | 7517 /* Bit-cast X into an integer mode. */ |
7518 if (!SCALAR_INT_MODE_P (GET_MODE (x))) | |
7519 x = gen_lowpart (int_mode_for_mode (GET_MODE (x)), x); | |
7520 x = simplify_gen_unary (TRUNCATE, int_mode_for_mode (mode), | |
7521 x, GET_MODE (x)); | |
7522 } | |
7523 | |
7524 return gen_lowpart (mode, x); | |
7249 } | 7525 } |
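/* Usage sketch: narrowing a DImode value X to SImode yields
   (subreg:SI X 0) when the target truncates for free
   (TRULY_NOOP_TRUNCATION) and an explicit (truncate:SI X) otherwise;
   non-integer modes are first bit-cast to an integer mode of the
   same size.  */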
7250 | 7526 |
7251 /* See if X can be simplified knowing that we will only refer to it in | 7527 /* See if X can be simplified knowing that we will only refer to it in |
7252 MODE and will only refer to those bits that are nonzero in MASK. | 7528 MODE and will only refer to those bits that are nonzero in MASK. |
7253 If other bits are being computed or if masking operations are done | 7529 If other bits are being computed or if masking operations are done |
7313 if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x)) | 7589 if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x)) |
7314 x = const0_rtx; | 7590 x = const0_rtx; |
7315 | 7591 |
7316 /* If X is a CONST_INT, return a new one. Do this here since the | 7592 /* If X is a CONST_INT, return a new one. Do this here since the |
7317 test below will fail. */ | 7593 test below will fail. */ |
7318 if (GET_CODE (x) == CONST_INT) | 7594 if (CONST_INT_P (x)) |
7319 { | 7595 { |
7320 if (SCALAR_INT_MODE_P (mode)) | 7596 if (SCALAR_INT_MODE_P (mode)) |
7321 return gen_int_mode (INTVAL (x) & mask, mode); | 7597 return gen_int_mode (INTVAL (x) & mask, mode); |
7322 else | 7598 else |
7323 { | 7599 { |
7330 get X in the proper mode. */ | 7606 get X in the proper mode. */ |
7331 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode) | 7607 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode) |
7332 && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0) | 7608 && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0) |
7333 return gen_lowpart (mode, x); | 7609 return gen_lowpart (mode, x); |
7334 | 7610 |
7335 /* The arithmetic simplifications here do the wrong thing on vector modes. */ | 7611 /* We can ignore the effect of a SUBREG if it narrows the mode or |
7336 if (VECTOR_MODE_P (mode) || VECTOR_MODE_P (GET_MODE (x))) | 7612 if the constant masks to zero all the bits the mode doesn't have. */ |
7337 return gen_lowpart (mode, x); | 7613 if (GET_CODE (x) == SUBREG |
7614 && subreg_lowpart_p (x) | |
7615 && ((GET_MODE_SIZE (GET_MODE (x)) | |
7616 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
7617 || (0 == (mask | |
7618 & GET_MODE_MASK (GET_MODE (x)) | |
7619 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))))))) | |
7620 return force_to_mode (SUBREG_REG (x), mode, mask, next_select); | |
7621 | |
7622 /* The arithmetic simplifications here only work for scalar integer modes. */ | |
7623 if (!SCALAR_INT_MODE_P (mode) || !SCALAR_INT_MODE_P (GET_MODE (x))) | |
7624 return gen_lowpart_or_truncate (mode, x); | |
7338 | 7625 |
7339 switch (code) | 7626 switch (code) |
7340 { | 7627 { |
7341 case CLOBBER: | 7628 case CLOBBER: |
7342 /* If X is a (clobber (const_int)), return it since we know we are | 7629 /* If X is a (clobber (const_int)), return it since we know we are |
7350 x = expand_compound_operation (x); | 7637 x = expand_compound_operation (x); |
7351 if (GET_CODE (x) != code) | 7638 if (GET_CODE (x) != code) |
7352 return force_to_mode (x, mode, mask, next_select); | 7639 return force_to_mode (x, mode, mask, next_select); |
7353 break; | 7640 break; |
7354 | 7641 |
7355 case SUBREG: | 7642 case TRUNCATE: |
7356 if (subreg_lowpart_p (x) | 7643 /* Similarly for a truncate. */ |
7357 /* We can ignore the effect of this SUBREG if it narrows the mode or | 7644 return force_to_mode (XEXP (x, 0), mode, mask, next_select); |
7358 if the constant masks to zero all the bits the mode doesn't | |
7359 have. */ | |
7360 && ((GET_MODE_SIZE (GET_MODE (x)) | |
7361 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
7362 || (0 == (mask | |
7363 & GET_MODE_MASK (GET_MODE (x)) | |
7364 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))))))) | |
7365 return force_to_mode (SUBREG_REG (x), mode, mask, next_select); | |
7366 break; | |
7367 | 7645 |
7368 case AND: | 7646 case AND: |
7369 /* If this is an AND with a constant, convert it into an AND | 7647 /* If this is an AND with a constant, convert it into an AND |
7370 whose constant is the AND of that constant with MASK. If it | 7648 whose constant is the AND of that constant with MASK. If it |
7371 remains an AND of MASK, delete it since it is redundant. */ | 7649 remains an AND of MASK, delete it since it is redundant. */ |
7372 | 7650 |
7373 if (GET_CODE (XEXP (x, 1)) == CONST_INT) | 7651 if (CONST_INT_P (XEXP (x, 1))) |
7374 { | 7652 { |
7375 x = simplify_and_const_int (x, op_mode, XEXP (x, 0), | 7653 x = simplify_and_const_int (x, op_mode, XEXP (x, 0), |
7376 mask & INTVAL (XEXP (x, 1))); | 7654 mask & INTVAL (XEXP (x, 1))); |
7377 | 7655 |
7378 /* If X is still an AND, see if it is an AND with a mask that | 7656 /* If X is still an AND, see if it is an AND with a mask that |
7379 is just some low-order bits. If so, and it is MASK, we don't | 7657 is just some low-order bits. If so, and it is MASK, we don't |
7380 need it. */ | 7658 need it. */ |
7381 | 7659 |
7382 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT | 7660 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)) |
7383 && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x))) | 7661 && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x))) |
7384 == mask)) | 7662 == mask)) |
7385 x = XEXP (x, 0); | 7663 x = XEXP (x, 0); |
7386 | 7664 |
7387 /* If it remains an AND, try making another AND with the bits | 7665 /* If it remains an AND, try making another AND with the bits |
7388 in the mode mask that aren't in MASK turned on. If the | 7666 in the mode mask that aren't in MASK turned on. If the |
7389 constant in the AND is wide enough, this might make a | 7667 constant in the AND is wide enough, this might make a |
7390 cheaper constant. */ | 7668 cheaper constant. */ |
7391 | 7669 |
7392 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT | 7670 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)) |
7393 && GET_MODE_MASK (GET_MODE (x)) != mask | 7671 && GET_MODE_MASK (GET_MODE (x)) != mask |
7394 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT) | 7672 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT) |
7395 { | 7673 { |
7396 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1)) | 7674 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1)) |
7397 | (GET_MODE_MASK (GET_MODE (x)) & ~mask)); | 7675 | (GET_MODE_MASK (GET_MODE (x)) & ~mask)); |
7431 | 7709 |
7432 if (width < HOST_BITS_PER_WIDE_INT | 7710 if (width < HOST_BITS_PER_WIDE_INT |
7433 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) | 7711 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) |
7434 smask |= (HOST_WIDE_INT) -1 << width; | 7712 smask |= (HOST_WIDE_INT) -1 << width; |
7435 | 7713 |
7436 if (GET_CODE (XEXP (x, 1)) == CONST_INT | 7714 if (CONST_INT_P (XEXP (x, 1)) |
7437 && exact_log2 (- smask) >= 0 | 7715 && exact_log2 (- smask) >= 0 |
7438 && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0 | 7716 && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0 |
7439 && (INTVAL (XEXP (x, 1)) & ~smask) != 0) | 7717 && (INTVAL (XEXP (x, 1)) & ~smask) != 0) |
7440 return force_to_mode (plus_constant (XEXP (x, 0), | 7718 return force_to_mode (plus_constant (XEXP (x, 0), |
7441 (INTVAL (XEXP (x, 1)) & smask)), | 7719 (INTVAL (XEXP (x, 1)) & smask)), |
7452 goto binop; | 7730 goto binop; |
7453 | 7731 |
7454 case MINUS: | 7732 case MINUS: |
7455 /* If X is (minus C Y) where C's least set bit is larger than any bit | 7733 /* If X is (minus C Y) where C's least set bit is larger than any bit |
7456 in the mask, then we may replace with (neg Y). */ | 7734 in the mask, then we may replace with (neg Y). */ |
7457 if (GET_CODE (XEXP (x, 0)) == CONST_INT | 7735 if (CONST_INT_P (XEXP (x, 0)) |
7458 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0)) | 7736 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0)) |
7459 & -INTVAL (XEXP (x, 0)))) | 7737 & -INTVAL (XEXP (x, 0)))) |
7460 > mask)) | 7738 > mask)) |
7461 { | 7739 { |
7462 x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1), | 7740 x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1), |
7463 GET_MODE (x)); | 7741 GET_MODE (x)); |
7464 return force_to_mode (x, mode, mask, next_select); | 7742 return force_to_mode (x, mode, mask, next_select); |
7465 } | 7743 } |
7466 | 7744 |
7467 /* Similarly, if C contains every bit in the fuller_mask, then we may | 7745 /* Similarly, if C contains every bit in the fuller_mask, then we may |
7468 replace with (not Y). */ | 7746 replace with (not Y). */ |
7469 if (GET_CODE (XEXP (x, 0)) == CONST_INT | 7747 if (CONST_INT_P (XEXP (x, 0)) |
7470 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask) | 7748 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask) |
7471 == INTVAL (XEXP (x, 0)))) | 7749 == INTVAL (XEXP (x, 0)))) |
7472 { | 7750 { |
7473 x = simplify_gen_unary (NOT, GET_MODE (x), | 7751 x = simplify_gen_unary (NOT, GET_MODE (x), |
7474 XEXP (x, 1), GET_MODE (x)); | 7752 XEXP (x, 1), GET_MODE (x)); |
7484 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...) | 7762 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...) |
7485 operation which may be a bitfield extraction. Ensure that the | 7763 operation which may be a bitfield extraction. Ensure that the |
7486 constant we form is not wider than the mode of X. */ | 7764 constant we form is not wider than the mode of X. */ |
7487 | 7765 |
7488 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | 7766 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT |
7489 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | 7767 && CONST_INT_P (XEXP (XEXP (x, 0), 1)) |
7490 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | 7768 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 |
7491 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT | 7769 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT |
7492 && GET_CODE (XEXP (x, 1)) == CONST_INT | 7770 && CONST_INT_P (XEXP (x, 1)) |
7493 && ((INTVAL (XEXP (XEXP (x, 0), 1)) | 7771 && ((INTVAL (XEXP (XEXP (x, 0), 1)) |
7494 + floor_log2 (INTVAL (XEXP (x, 1)))) | 7772 + floor_log2 (INTVAL (XEXP (x, 1)))) |
7495 < GET_MODE_BITSIZE (GET_MODE (x))) | 7773 < GET_MODE_BITSIZE (GET_MODE (x))) |
7496 && (INTVAL (XEXP (x, 1)) | 7774 && (INTVAL (XEXP (x, 1)) |
7497 & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0) | 7775 & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0) |
7507 | 7785 |
7508 binop: | 7786 binop: |
7509 /* For most binary operations, just propagate into the operation and | 7787 /* For most binary operations, just propagate into the operation and |
7510 change the mode if we have an operation of that mode. */ | 7788 change the mode if we have an operation of that mode. */ |
7511 | 7789 |
7512 op0 = gen_lowpart_or_truncate (op_mode, | 7790 op0 = force_to_mode (XEXP (x, 0), mode, mask, next_select); |
7513 force_to_mode (XEXP (x, 0), mode, mask, | 7791 op1 = force_to_mode (XEXP (x, 1), mode, mask, next_select); |
7514 next_select)); | 7792 |
7515 op1 = gen_lowpart_or_truncate (op_mode, | 7793 /* If we ended up truncating both operands, truncate the result of the |
7516 force_to_mode (XEXP (x, 1), mode, mask, | 7794 operation instead. */ |
7517 next_select)); | 7795 if (GET_CODE (op0) == TRUNCATE |
7796 && GET_CODE (op1) == TRUNCATE) | |
7797 { | |
7798 op0 = XEXP (op0, 0); | |
7799 op1 = XEXP (op1, 0); | |
7800 } | |
7801 | |
7802 op0 = gen_lowpart_or_truncate (op_mode, op0); | |
7803 op1 = gen_lowpart_or_truncate (op_mode, op1); | |
7518 | 7804 |
7519 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1)) | 7805 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1)) |
7520 x = simplify_gen_binary (code, op_mode, op0, op1); | 7806 x = simplify_gen_binary (code, op_mode, op0, op1); |
7521 break; | 7807 break; |
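/* Rationale sketch: each low-order result bit of the operations
   funneled through this case depends only on the low-order bits of
   the operands, so e.g. (ior (truncate:SI A) (truncate:SI B)) equals
   (truncate:SI (ior A B)); hoisting the TRUNCATE keeps the operation
   in the wider mode, where more simplifications apply.  */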
7522 | 7808 |
7523 case ASHIFT: | 7809 case ASHIFT: |
7524 /* For left shifts, do the same, but just for the first operand. | 7810 /* For left shifts, do the same, but just for the first operand. |
7525 However, we cannot do anything with shifts where we cannot | 7811 However, we cannot do anything with shifts where we cannot |
7526 guarantee that the counts are smaller than the size of the mode | 7812 guarantee that the counts are smaller than the size of the mode |
7527 because such a count will have a different meaning in a | 7813 because such a count will have a different meaning in a |
7528 wider mode. */ | 7814 wider mode. */ |
7529 | 7815 |
7530 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT | 7816 if (! (CONST_INT_P (XEXP (x, 1)) |
7531 && INTVAL (XEXP (x, 1)) >= 0 | 7817 && INTVAL (XEXP (x, 1)) >= 0 |
7532 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)) | 7818 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)) |
7533 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode | 7819 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode |
7534 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1))) | 7820 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1))) |
7535 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode)))) | 7821 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode)))) |
7536 break; | 7822 break; |
7537 | 7823 |
7538 /* If the shift count is a constant and we can do arithmetic in | 7824 /* If the shift count is a constant and we can do arithmetic in |
7539 the mode of the shift, refine which bits we need. Otherwise, use the | 7825 the mode of the shift, refine which bits we need. Otherwise, use the |
7540 conservative form of the mask. */ | 7826 conservative form of the mask. */ |
7541 if (GET_CODE (XEXP (x, 1)) == CONST_INT | 7827 if (CONST_INT_P (XEXP (x, 1)) |
7542 && INTVAL (XEXP (x, 1)) >= 0 | 7828 && INTVAL (XEXP (x, 1)) >= 0 |
7543 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode) | 7829 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode) |
7544 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT) | 7830 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT) |
7545 mask >>= INTVAL (XEXP (x, 1)); | 7831 mask >>= INTVAL (XEXP (x, 1)); |
7546 else | 7832 else |
7557 case LSHIFTRT: | 7843 case LSHIFTRT: |
7558 /* Here we can only do something if the shift count is a constant, | 7844 /* Here we can only do something if the shift count is a constant, |
7559 this shift constant is valid for the host, and we can do arithmetic | 7845 this shift constant is valid for the host, and we can do arithmetic |
7560 in OP_MODE. */ | 7846 in OP_MODE. */ |
7561 | 7847 |
7562 if (GET_CODE (XEXP (x, 1)) == CONST_INT | 7848 if (CONST_INT_P (XEXP (x, 1)) |
7563 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT | 7849 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
7564 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT) | 7850 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT) |
7565 { | 7851 { |
7566 rtx inner = XEXP (x, 0); | 7852 rtx inner = XEXP (x, 0); |
7567 unsigned HOST_WIDE_INT inner_mask; | 7853 unsigned HOST_WIDE_INT inner_mask; |
7584 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the | 7870 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the |
7585 shift and AND produces only copies of the sign bit (C2 is one less | 7871 shift and AND produces only copies of the sign bit (C2 is one less |
7586 than a power of two), we can do this with just a shift. */ | 7872 than a power of two), we can do this with just a shift. */ |
7587 | 7873 |
7588 if (GET_CODE (x) == LSHIFTRT | 7874 if (GET_CODE (x) == LSHIFTRT |
7589 && GET_CODE (XEXP (x, 1)) == CONST_INT | 7875 && CONST_INT_P (XEXP (x, 1)) |
7590 /* The shift puts one of the sign bit copies in the least significant | 7876 /* The shift puts one of the sign bit copies in the least significant |
7591 bit. */ | 7877 bit. */ |
7592 && ((INTVAL (XEXP (x, 1)) | 7878 && ((INTVAL (XEXP (x, 1)) |
7593 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))) | 7879 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))) |
7594 >= GET_MODE_BITSIZE (GET_MODE (x))) | 7880 >= GET_MODE_BITSIZE (GET_MODE (x))) |
7619 MASK only includes those bits, this can be a logical shift, which may | 7905 MASK only includes those bits, this can be a logical shift, which may |
7620 allow simplifications. If MASK is a single-bit field not within | 7906 allow simplifications. If MASK is a single-bit field not within |
7621 those bits, we are requesting a copy of the sign bit and hence can | 7907 those bits, we are requesting a copy of the sign bit and hence can |
7622 shift the sign bit to the appropriate location. */ | 7908 shift the sign bit to the appropriate location. */ |
7623 | 7909 |
7624 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0 | 7910 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0 |
7625 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) | 7911 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) |
7626 { | 7912 { |
7627 int i; | 7913 int i; |
7628 | 7914 |
7629 /* If the considered data is wider than HOST_WIDE_INT, we can't | 7915 /* If the considered data is wider than HOST_WIDE_INT, we can't |
7681 /* If this is a zero- or sign-extension operation that just affects bits | 7967 /* If this is a zero- or sign-extension operation that just affects bits |
7682 we don't care about, remove it. Be sure the call above returned | 7968 we don't care about, remove it. Be sure the call above returned |
7683 something that is still a shift. */ | 7969 something that is still a shift. */ |
7684 | 7970 |
7685 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT) | 7971 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT) |
7686 && GET_CODE (XEXP (x, 1)) == CONST_INT | 7972 && CONST_INT_P (XEXP (x, 1)) |
7687 && INTVAL (XEXP (x, 1)) >= 0 | 7973 && INTVAL (XEXP (x, 1)) >= 0 |
7688 && (INTVAL (XEXP (x, 1)) | 7974 && (INTVAL (XEXP (x, 1)) |
7689 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1)) | 7975 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1)) |
7690 && GET_CODE (XEXP (x, 0)) == ASHIFT | 7976 && GET_CODE (XEXP (x, 0)) == ASHIFT |
7691 && XEXP (XEXP (x, 0), 1) == XEXP (x, 1)) | 7977 && XEXP (XEXP (x, 0), 1) == XEXP (x, 1)) |
7698 case ROTATERT: | 7984 case ROTATERT: |
7699 /* If the shift count is constant and we can do computations | 7985 /* If the shift count is constant and we can do computations |
7700 in the mode of X, compute where the bits we care about are. | 7986 in the mode of X, compute where the bits we care about are. |
7701 Otherwise, we can't do anything. Don't change the mode of | 7987 Otherwise, we can't do anything. Don't change the mode of |
7702 the shift or propagate MODE into the shift, though. */ | 7988 the shift or propagate MODE into the shift, though. */ |
7703 if (GET_CODE (XEXP (x, 1)) == CONST_INT | 7989 if (CONST_INT_P (XEXP (x, 1)) |
7704 && INTVAL (XEXP (x, 1)) >= 0) | 7990 && INTVAL (XEXP (x, 1)) >= 0) |
7705 { | 7991 { |
7706 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE, | 7992 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE, |
7707 GET_MODE (x), GEN_INT (mask), | 7993 GET_MODE (x), GEN_INT (mask), |
7708 XEXP (x, 1)); | 7994 XEXP (x, 1)); |
7709 if (temp && GET_CODE (temp) == CONST_INT) | 7995 if (temp && CONST_INT_P (temp)) |
7710 SUBST (XEXP (x, 0), | 7996 SUBST (XEXP (x, 0), |
7711 force_to_mode (XEXP (x, 0), GET_MODE (x), | 7997 force_to_mode (XEXP (x, 0), GET_MODE (x), |
7712 INTVAL (temp), next_select)); | 7998 INTVAL (temp), next_select)); |
7713 } | 7999 } |
7714 break; | 8000 break; |
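The SUBST above rotates MASK the opposite way before recursing; the underlying identity, checked standalone in 32-bit arithmetic (a sketch — rotl/rotr are local helpers, not combine.c functions):

#include <assert.h>
#include <stdint.h>

static uint32_t rotl (uint32_t v, unsigned n)
{ return (v << (n & 31)) | (v >> ((32 - n) & 31)); }
static uint32_t rotr (uint32_t v, unsigned n)
{ return (v >> (n & 31)) | (v << ((32 - n) & 31)); }

int main (void)
{
  uint32_t x = 0xDEADBEEF, m = 0x00FFF0F0;
  unsigned c = 11;
  /* Masking after a ROTATE equals rotating an operand already masked
     by the inversely rotated mask: only bits that land under M matter.  */
  assert ((rotl (x, c) & m) == rotl (x & rotr (m, c), c));
  return 0;
}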
7729 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the | 8015 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the |
7730 same as the XOR case above. Ensure that the constant we form is not | 8016 same as the XOR case above. Ensure that the constant we form is not |
7731 wider than the mode of X. */ | 8017 wider than the mode of X. */ |
7732 | 8018 |
7733 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | 8019 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT |
7734 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | 8020 && CONST_INT_P (XEXP (XEXP (x, 0), 1)) |
7735 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | 8021 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 |
7736 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask) | 8022 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask) |
7737 < GET_MODE_BITSIZE (GET_MODE (x))) | 8023 < GET_MODE_BITSIZE (GET_MODE (x))) |
7738 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT) | 8024 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT) |
7739 { | 8025 { |
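The XOR-style rewrite this condition guards, checked standalone (a sketch; it needs m << c to still fit in the mode, which is what the INTVAL bounds ensure):

#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint32_t x = 0xDEADBEEF, m = 0xFF;
  unsigned c = 4;
  /* (and (not (lshiftrt x c)) m) == (and (lshiftrt (xor x (m << c)) c) m):
     the NOT acts as an XOR with a constant, which can be pushed inside
     the shift once the constant is shifted left by the same count.  */
  assert ((~(x >> c) & m) == (((x ^ (m << c)) >> c) & m));
  return 0;
}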
8218 a clear of a one-bit field. We will have changed it to | 8504 a clear of a one-bit field. We will have changed it to |
8219 (and (rotate (const_int -2) POS) DEST), so check for that. Also check | 8505 (and (rotate (const_int -2) POS) DEST), so check for that. Also check |
8220 for a SUBREG. */ | 8506 for a SUBREG. */ |
8221 | 8507 |
8222 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE | 8508 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE |
8223 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT | 8509 && CONST_INT_P (XEXP (XEXP (src, 0), 0)) |
8224 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2 | 8510 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2 |
8225 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) | 8511 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
8226 { | 8512 { |
8227 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1), | 8513 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1), |
8228 1, 1, 1, 0); | 8514 1, 1, 1, 0); |
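Why that pattern is a one-bit clear, sketched standalone in 32-bit arithmetic (rotl is a local helper):

#include <assert.h>
#include <stdint.h>

static uint32_t rotl (uint32_t v, unsigned n)
{ return (v << n) | (v >> ((32 - n) & 31)); }

int main (void)
{
  uint32_t dest = 0xA5A5A5A5;
  for (unsigned pos = 0; pos < 32; pos++)
    /* -2 is ...11110; rotating it left moves the lone zero to bit POS,
       so the AND clears exactly that bit of DEST.  */
    assert ((dest & rotl (0xFFFFFFFEu, pos)) == (dest & ~(1u << pos)));
  return 0;
}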
8234 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG | 8520 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG |
8235 && subreg_lowpart_p (XEXP (src, 0)) | 8521 && subreg_lowpart_p (XEXP (src, 0)) |
8236 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0))) | 8522 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0))) |
8237 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0))))) | 8523 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0))))) |
8238 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE | 8524 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE |
8239 && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT | 8525 && CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) |
8240 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2 | 8526 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2 |
8241 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) | 8527 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
8242 { | 8528 { |
8243 assign = make_extraction (VOIDmode, dest, 0, | 8529 assign = make_extraction (VOIDmode, dest, 0, |
8244 XEXP (SUBREG_REG (XEXP (src, 0)), 1), | 8530 XEXP (SUBREG_REG (XEXP (src, 0)), 1), |
8263 | 8549 |
8264 /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the | 8550 /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the |
8265 SRC is an AND with all bits of that field set, then we can discard | 8551 SRC is an AND with all bits of that field set, then we can discard |
8266 the AND. */ | 8552 the AND. */ |
8267 if (GET_CODE (dest) == ZERO_EXTRACT | 8553 if (GET_CODE (dest) == ZERO_EXTRACT |
8268 && GET_CODE (XEXP (dest, 1)) == CONST_INT | 8554 && CONST_INT_P (XEXP (dest, 1)) |
8269 && GET_CODE (src) == AND | 8555 && GET_CODE (src) == AND |
8270 && GET_CODE (XEXP (src, 1)) == CONST_INT) | 8556 && CONST_INT_P (XEXP (src, 1))) |
8271 { | 8557 { |
8272 HOST_WIDE_INT width = INTVAL (XEXP (dest, 1)); | 8558 HOST_WIDE_INT width = INTVAL (XEXP (dest, 1)); |
8273 unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1)); | 8559 unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1)); |
8274 unsigned HOST_WIDE_INT ze_mask; | 8560 unsigned HOST_WIDE_INT ze_mask; |
8275 | 8561 |
8305 | 8591 |
8306 rhs = expand_compound_operation (XEXP (src, 0)); | 8592 rhs = expand_compound_operation (XEXP (src, 0)); |
8307 lhs = expand_compound_operation (XEXP (src, 1)); | 8593 lhs = expand_compound_operation (XEXP (src, 1)); |
8308 | 8594 |
8309 if (GET_CODE (rhs) == AND | 8595 if (GET_CODE (rhs) == AND |
8310 && GET_CODE (XEXP (rhs, 1)) == CONST_INT | 8596 && CONST_INT_P (XEXP (rhs, 1)) |
8311 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest)) | 8597 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest)) |
8312 c1 = INTVAL (XEXP (rhs, 1)), other = lhs; | 8598 c1 = INTVAL (XEXP (rhs, 1)), other = lhs; |
8313 else if (GET_CODE (lhs) == AND | 8599 else if (GET_CODE (lhs) == AND |
8314 && GET_CODE (XEXP (lhs, 1)) == CONST_INT | 8600 && CONST_INT_P (XEXP (lhs, 1)) |
8315 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest)) | 8601 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest)) |
8316 c1 = INTVAL (XEXP (lhs, 1)), other = rhs; | 8602 c1 = INTVAL (XEXP (lhs, 1)), other = rhs; |
8317 else | 8603 else |
8318 return x; | 8604 return x; |
8319 | 8605 |
8346 0); | 8632 0); |
8347 | 8633 |
8348 /* If SRC is masked by an AND that does not make a difference in | 8634 /* If SRC is masked by an AND that does not make a difference in |
8349 the value being stored, strip it. */ | 8635 the value being stored, strip it. */ |
8350 if (GET_CODE (assign) == ZERO_EXTRACT | 8636 if (GET_CODE (assign) == ZERO_EXTRACT |
8351 && GET_CODE (XEXP (assign, 1)) == CONST_INT | 8637 && CONST_INT_P (XEXP (assign, 1)) |
8352 && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT | 8638 && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT |
8353 && GET_CODE (src) == AND | 8639 && GET_CODE (src) == AND |
8354 && GET_CODE (XEXP (src, 1)) == CONST_INT | 8640 && CONST_INT_P (XEXP (src, 1)) |
8355 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1)) | 8641 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1)) |
8356 == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1)) | 8642 == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1)) |
8357 src = XEXP (src, 0); | 8643 src = XEXP (src, 0); |
8358 | 8644 |
8359 return gen_rtx_SET (VOIDmode, assign, src); | 8645 return gen_rtx_SET (VOIDmode, assign, src); |
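The redundancy being stripped, sketched with a hypothetical C bit-field: a store into an N-bit field truncates to N bits anyway, so a prior AND whose mask covers the field is a no-op.

#include <assert.h>

struct s { unsigned f : 3; };

int main (void)
{
  struct s a = { 0 }, b = { 0 };
  unsigned src = 0x5D;
  a.f = src & 0x7;   /* AND with all three field bits set ...            */
  b.f = src;         /* ... makes no difference: the store truncates too. */
  assert (a.f == b.f);
  return 0;
}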
8595 if (GET_CODE (varop) == CLOBBER) | 8881 if (GET_CODE (varop) == CLOBBER) |
8596 return varop; | 8882 return varop; |
8597 | 8883 |
8598 /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP | 8884 /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP |
8599 to VAROP and return the new constant. */ | 8885 to VAROP and return the new constant. */ |
8600 if (GET_CODE (varop) == CONST_INT) | 8886 if (CONST_INT_P (varop)) |
8601 return gen_int_mode (INTVAL (varop) & constop, mode); | 8887 return gen_int_mode (INTVAL (varop) & constop, mode); |
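What gen_int_mode buys here, sketched for QImode (assuming the usual two's-complement narrowing): the AND happens on the host-wide value and the result is re-canonicalized for the narrow mode.

#include <assert.h>
#include <stdint.h>

int main (void)
{
  /* Folding (and (const_int -1) (const_int 0x80)) in QImode: the masked
     value 0x80 re-reads as -128 once canonicalized to 8 bits.  */
  int8_t folded = (int8_t) (-1 & 0x80);
  assert (folded == -128);
  return 0;
}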
8602 | 8888 |
8603 /* See what bits may be nonzero in VAROP. Unlike the general case of | 8889 /* See what bits may be nonzero in VAROP. Unlike the general case of |
8604 a call to nonzero_bits, here we don't care about bits outside | 8890 a call to nonzero_bits, here we don't care about bits outside |
8605 MODE. */ | 8891 MODE. */ |
8751 | 9037 |
8752 ??? For 2.5, try to tighten up the MD files in this regard | 9038 ??? For 2.5, try to tighten up the MD files in this regard |
8753 instead of this kludge. */ | 9039 instead of this kludge. */ |
8754 | 9040 |
8755 if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode) | 9041 if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode) |
8756 && GET_CODE (tem) == CONST_INT | 9042 && CONST_INT_P (tem) |
8757 && INTVAL (tem) > 0 | 9043 && INTVAL (tem) > 0 |
8758 && 0 != (INTVAL (tem) | 9044 && 0 != (INTVAL (tem) |
8759 & ((HOST_WIDE_INT) 1 | 9045 & ((HOST_WIDE_INT) 1 |
8760 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) | 9046 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) |
8761 tem = GEN_INT (INTVAL (tem) | 9047 tem = GEN_INT (INTVAL (tem) |
8974 *pconst0 = trunc_int_for_mode (const0, mode); | 9260 *pconst0 = trunc_int_for_mode (const0, mode); |
8975 | 9261 |
8976 return 1; | 9262 return 1; |
8977 } | 9263 } |
8978 | 9264 |
9265 /* A helper to simplify_shift_const_1 to determine the mode we can perform | |
9266 the shift in. The original shift operation CODE is performed on OP in | |
9267 ORIG_MODE. Return the wider mode MODE if we can perform the operation | |
9268 in that mode. Return ORIG_MODE otherwise. We can also assume that the | |
9269 result of the shift is subject to operation OUTER_CODE with operand | |
9270 OUTER_CONST. */ | |
9271 | |
9272 static enum machine_mode | |
9273 try_widen_shift_mode (enum rtx_code code, rtx op, int count, | |
9274 enum machine_mode orig_mode, enum machine_mode mode, | |
9275 enum rtx_code outer_code, HOST_WIDE_INT outer_const) | |
9276 { | |
9277 if (orig_mode == mode) | |
9278 return mode; | |
9279 gcc_assert (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (orig_mode)); | |
9280 | |
9281 /* In general we can't perform the shift in a wider mode for a right shift or a rotate. */ |
9282 switch (code) | |
9283 { | |
9284 case ASHIFTRT: | |
9285 /* We can still widen if the bits brought in from the left are identical | |
9286 to the sign bit of ORIG_MODE. */ | |
9287 if (num_sign_bit_copies (op, mode) | |
9288 > (unsigned) (GET_MODE_BITSIZE (mode) | |
9289 - GET_MODE_BITSIZE (orig_mode))) | |
9290 return mode; | |
9291 return orig_mode; | |
9292 | |
9293 case LSHIFTRT: | |
9294 /* Similarly here but with zero bits. */ | |
9295 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
9296 && (nonzero_bits (op, mode) & ~GET_MODE_MASK (orig_mode)) == 0) | |
9297 return mode; | |
9298 | |
9299 /* We can also widen if the bits brought in will be masked off. This | |
9300 operation is performed in ORIG_MODE. */ | |
9301 if (outer_code == AND) | |
9302 { | |
9303 int care_bits = low_bitmask_len (orig_mode, outer_const); | |
9304 | |
9305 if (care_bits >= 0 | |
9306 && GET_MODE_BITSIZE (orig_mode) - care_bits >= count) | |
9307 return mode; | |
9308 } | |
9309 /* fall through */ | |
9310 | |
9311 case ROTATE: | |
9312 return orig_mode; | |
9313 | |
9314 case ROTATERT: | |
9315 gcc_unreachable (); | |
9316 | |
9317 default: | |
9318 return mode; | |
9319 } | |
9320 } | |
9321 | |
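A standalone illustration of the LSHIFTRT leg (a sketch; HImode stands in for ORIG_MODE and SImode for MODE): widening is safe when the outer AND discards every bit the wider shift pulls down from above the narrow mode.

#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint16_t x = 0xBEEF;               /* the HImode operand          */
  uint32_t wide = 0xDEAD0000u | x;   /* same value, garbage above   */
  /* Here low_bitmask_len gives 8 care bits and 16 - 8 >= 3, so the
     shift may be done in the wide mode.  */
  assert (((x >> 3) & 0xFF) == (((uint16_t) (wide >> 3)) & 0xFF));
  /* Without the AND, widening would be wrong: the top HImode bits of
     the wide shift come from the garbage above bit 15.  */
  assert ((uint16_t) (x >> 3) != (uint16_t) (wide >> 3));
  return 0;
}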
8979 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift. | 9322 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift. |
8980 The result of the shift is RESULT_MODE. Return NULL_RTX if we cannot | 9323 The result of the shift is RESULT_MODE. Return NULL_RTX if we cannot |
8981 simplify it. Otherwise, return a simplified value. | 9324 simplify it. Otherwise, return a simplified value. |
8982 | 9325 |
8983 The shift is normally computed in the widest mode we find in VAROP, as | 9326 The shift is normally computed in the widest mode we find in VAROP, as |
9033 count = bitsize / GET_MODE_NUNITS (result_mode) - count; | 9376 count = bitsize / GET_MODE_NUNITS (result_mode) - count; |
9034 else | 9377 else |
9035 count = bitsize - count; | 9378 count = bitsize - count; |
9036 } | 9379 } |
9037 | 9380 |
9038 /* We need to determine what mode we will do the shift in. If the | 9381 shift_mode = try_widen_shift_mode (code, varop, count, result_mode, |
9039 shift is a right shift or a ROTATE, we must always do it in the mode | 9382 mode, outer_op, outer_const); |
9040 it was originally done in. Otherwise, we can do it in MODE, the | |
9041 widest mode encountered. */ | |
9042 shift_mode | |
9043 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE | |
9044 ? result_mode : mode); | |
9045 | 9383 |
9046 /* Handle cases where the count is greater than the size of the mode | 9384 /* Handle cases where the count is greater than the size of the mode |
9047 minus 1. For ASHIFT, use the size minus one as the count (this can | 9385 minus 1. For ASHIFT, use the size minus one as the count (this can |
9048 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates, | 9386 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates, |
9049 take the count modulo the size. For other shifts, the result is | 9387 take the count modulo the size. For other shifts, the result is |
9170 | 9508 |
9171 case MULT: | 9509 case MULT: |
9172 /* Some machines use MULT instead of ASHIFT because MULT | 9510 /* Some machines use MULT instead of ASHIFT because MULT |
9173 is cheaper. But it is still better on those machines to | 9511 is cheaper. But it is still better on those machines to |
9174 merge two shifts into one. */ | 9512 merge two shifts into one. */ |
9175 if (GET_CODE (XEXP (varop, 1)) == CONST_INT | 9513 if (CONST_INT_P (XEXP (varop, 1)) |
9176 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) | 9514 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) |
9177 { | 9515 { |
9178 varop | 9516 varop |
9179 = simplify_gen_binary (ASHIFT, GET_MODE (varop), | 9517 = simplify_gen_binary (ASHIFT, GET_MODE (varop), |
9180 XEXP (varop, 0), | 9518 XEXP (varop, 0), |
9184 } | 9522 } |
9185 break; | 9523 break; |
9186 | 9524 |
9187 case UDIV: | 9525 case UDIV: |
9188 /* Similar, for when divides are cheaper. */ | 9526 /* Similar, for when divides are cheaper. */ |
9189 if (GET_CODE (XEXP (varop, 1)) == CONST_INT | 9527 if (CONST_INT_P (XEXP (varop, 1)) |
9190 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) | 9528 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) |
9191 { | 9529 { |
9192 varop | 9530 varop |
9193 = simplify_gen_binary (LSHIFTRT, GET_MODE (varop), | 9531 = simplify_gen_binary (LSHIFTRT, GET_MODE (varop), |
9194 XEXP (varop, 0), | 9532 XEXP (varop, 0), |
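Both rewrites as plain 32-bit unsigned arithmetic (an illustrative sketch): the power-of-two MULT or UDIV becomes a shift, which then merges with the enclosing shift.

#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint32_t x = 0x01234567;
  /* (ashift (mult x 8) 2): rewrite the MULT as ASHIFT, then merge.  */
  assert (((x * 8u) << 2) == (x << 5));
  /* (lshiftrt (udiv x 8) 2): same idea with LSHIFTRT.  */
  assert (((x / 8u) >> 2) == (x >> 5));
  return 0;
}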
9218 case LSHIFTRT: | 9556 case LSHIFTRT: |
9219 case ASHIFT: | 9557 case ASHIFT: |
9220 case ROTATE: | 9558 case ROTATE: |
9221 /* Here we have two nested shifts. The result is usually the | 9559 /* Here we have two nested shifts. The result is usually the |
9222 AND of a new shift with a mask. We compute the result below. */ | 9560 AND of a new shift with a mask. We compute the result below. */ |
9223 if (GET_CODE (XEXP (varop, 1)) == CONST_INT | 9561 if (CONST_INT_P (XEXP (varop, 1)) |
9224 && INTVAL (XEXP (varop, 1)) >= 0 | 9562 && INTVAL (XEXP (varop, 1)) >= 0 |
9225 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop)) | 9563 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop)) |
9226 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT | 9564 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
9227 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | 9565 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
9228 && !VECTOR_MODE_P (result_mode)) | 9566 && !VECTOR_MODE_P (result_mode)) |
9326 = simplify_const_binary_operation (code, result_mode, mask_rtx, | 9664 = simplify_const_binary_operation (code, result_mode, mask_rtx, |
9327 GEN_INT (count)); | 9665 GEN_INT (count)); |
9328 | 9666 |
9329 /* Give up if we can't compute an outer operation to use. */ | 9667 /* Give up if we can't compute an outer operation to use. */ |
9330 if (mask_rtx == 0 | 9668 if (mask_rtx == 0 |
9331 || GET_CODE (mask_rtx) != CONST_INT | 9669 || !CONST_INT_P (mask_rtx) |
9332 || ! merge_outer_ops (&outer_op, &outer_const, AND, | 9670 || ! merge_outer_ops (&outer_op, &outer_const, AND, |
9333 INTVAL (mask_rtx), | 9671 INTVAL (mask_rtx), |
9334 result_mode, &complement_p)) | 9672 result_mode, &complement_p)) |
9335 break; | 9673 break; |
9336 | 9674 |
9360 /* If we have (A << B << C) for any shift, we can convert this to | 9698 /* If we have (A << B << C) for any shift, we can convert this to |
9361 (A << C << B). This wins if A is a constant. Only try this if | 9699 (A << C << B). This wins if A is a constant. Only try this if |
9362 B is not a constant. */ | 9700 B is not a constant. */ |
9363 | 9701 |
9364 else if (GET_CODE (varop) == code | 9702 else if (GET_CODE (varop) == code |
9365 && GET_CODE (XEXP (varop, 0)) == CONST_INT | 9703 && CONST_INT_P (XEXP (varop, 0)) |
9366 && GET_CODE (XEXP (varop, 1)) != CONST_INT) | 9704 && !CONST_INT_P (XEXP (varop, 1))) |
9367 { | 9705 { |
9368 rtx new_rtx = simplify_const_binary_operation (code, mode, | 9706 rtx new_rtx = simplify_const_binary_operation (code, mode, |
9369 XEXP (varop, 0), | 9707 XEXP (varop, 0), |
9370 GEN_INT (count)); | 9708 GEN_INT (count)); |
9371 varop = gen_rtx_fmt_ee (code, mode, new_rtx, XEXP (varop, 1)); | 9709 varop = gen_rtx_fmt_ee (code, mode, new_rtx, XEXP (varop, 1)); |
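Why the counts may be swapped when A is constant (a sketch): both orders equal A shifted by B + C, and the inner shift of the constant folds away.

#include <assert.h>
#include <stdint.h>

int main (void)
{
  for (unsigned b = 0; b < 8; b++)
    /* ((5 << b) << 3) == ((5 << 3) << b), and 5 << 3 folds to 40.  */
    assert (((5u << b) << 3) == ((5u << 3) << b));
  return 0;
}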
9414 to allow it to possibly combine with another logical and the | 9752 to allow it to possibly combine with another logical and the |
9415 shift to combine with another shift. This also canonicalizes to | 9753 shift to combine with another shift. This also canonicalizes to |
9416 what a ZERO_EXTRACT looks like. Also, some machines have | 9754 what a ZERO_EXTRACT looks like. Also, some machines have |
9417 (and (shift)) insns. */ | 9755 (and (shift)) insns. */ |
9418 | 9756 |
9419 if (GET_CODE (XEXP (varop, 1)) == CONST_INT | 9757 if (CONST_INT_P (XEXP (varop, 1)) |
9420 /* We can't do this if we have (ashiftrt (xor)) and the | 9758 /* We can't do this if we have (ashiftrt (xor)) and the |
9421 constant has its sign bit set in shift_mode. */ | 9759 constant has its sign bit set in shift_mode. */ |
9422 && !(code == ASHIFTRT && GET_CODE (varop) == XOR | 9760 && !(code == ASHIFTRT && GET_CODE (varop) == XOR |
9423 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)), | 9761 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)), |
9424 shift_mode)) | 9762 shift_mode)) |
9425 && (new_rtx = simplify_const_binary_operation (code, result_mode, | 9763 && (new_rtx = simplify_const_binary_operation (code, result_mode, |
9426 XEXP (varop, 1), | 9764 XEXP (varop, 1), |
9427 GEN_INT (count))) != 0 | 9765 GEN_INT (count))) != 0 |
9428 && GET_CODE (new_rtx) == CONST_INT | 9766 && CONST_INT_P (new_rtx) |
9429 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop), | 9767 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop), |
9430 INTVAL (new_rtx), result_mode, &complement_p)) | 9768 INTVAL (new_rtx), result_mode, &complement_p)) |
9431 { | 9769 { |
9432 varop = XEXP (varop, 0); | 9770 varop = XEXP (varop, 0); |
9433 continue; | 9771 continue; |
9435 | 9773 |
9436 /* If we can't do that, try to simplify the shift in each arm of the | 9774 /* If we can't do that, try to simplify the shift in each arm of the |
9437 logical expression, make a new logical expression, and apply | 9775 logical expression, make a new logical expression, and apply |
9438 the inverse distributive law. This also can't be done | 9776 the inverse distributive law. This also can't be done |
9439 for some (ashiftrt (xor)). */ | 9777 for some (ashiftrt (xor)). */ |
9440 if (GET_CODE (XEXP (varop, 1)) == CONST_INT | 9778 if (CONST_INT_P (XEXP (varop, 1)) |
9441 && !(code == ASHIFTRT && GET_CODE (varop) == XOR | 9779 && !(code == ASHIFTRT && GET_CODE (varop) == XOR |
9442 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)), | 9780 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)), |
9443 shift_mode))) | 9781 shift_mode))) |
9444 { | 9782 { |
9445 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode, | 9783 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode, |
9547 continue; | 9885 continue; |
9548 } | 9886 } |
9549 | 9887 |
9550 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */ | 9888 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */ |
9551 if (code == ASHIFT | 9889 if (code == ASHIFT |
9552 && GET_CODE (XEXP (varop, 1)) == CONST_INT | 9890 && CONST_INT_P (XEXP (varop, 1)) |
9553 && (new_rtx = simplify_const_binary_operation (ASHIFT, result_mode, | 9891 && (new_rtx = simplify_const_binary_operation (ASHIFT, result_mode, |
9554 XEXP (varop, 1), | 9892 XEXP (varop, 1), |
9555 GEN_INT (count))) != 0 | 9893 GEN_INT (count))) != 0 |
9556 && GET_CODE (new_rtx) == CONST_INT | 9894 && CONST_INT_P (new_rtx) |
9557 && merge_outer_ops (&outer_op, &outer_const, PLUS, | 9895 && merge_outer_ops (&outer_op, &outer_const, PLUS, |
9558 INTVAL (new_rtx), result_mode, &complement_p)) | 9896 INTVAL (new_rtx), result_mode, &complement_p)) |
9559 { | 9897 { |
9560 varop = XEXP (varop, 0); | 9898 varop = XEXP (varop, 0); |
9561 continue; | 9899 continue; |
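The distribution used here, checked standalone (modular 32-bit arithmetic, so it holds even when the PLUS wraps):

#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint32_t foo = 0x89ABCDEF, c = 0xFFFF1234;
  unsigned n = 5;
  /* (ashift (plus foo c) n) == (plus (ashift foo n) c'), c' = c << n.  */
  assert (((foo + c) << n) == ((foo << n) + (c << n)));
  return 0;
}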
9565 signbit', and attempt to change the PLUS to an XOR and move it to | 9903 signbit', and attempt to change the PLUS to an XOR and move it to |
9566 the outer operation, as is done above in the AND/IOR/XOR case | 9904 the outer operation, as is done above in the AND/IOR/XOR case |
9567 for logical shifts. See the logical-shift handling above | 9905 for logical shifts. See the logical-shift handling above |
9568 for the reasoning behind this. */ | 9906 for the reasoning behind this. */ |
9569 if (code == LSHIFTRT | 9907 if (code == LSHIFTRT |
9570 && GET_CODE (XEXP (varop, 1)) == CONST_INT | 9908 && CONST_INT_P (XEXP (varop, 1)) |
9571 && mode_signbit_p (result_mode, XEXP (varop, 1)) | 9909 && mode_signbit_p (result_mode, XEXP (varop, 1)) |
9572 && (new_rtx = simplify_const_binary_operation (code, result_mode, | 9910 && (new_rtx = simplify_const_binary_operation (code, result_mode, |
9573 XEXP (varop, 1), | 9911 XEXP (varop, 1), |
9574 GEN_INT (count))) != 0 | 9912 GEN_INT (count))) != 0 |
9575 && GET_CODE (new_rtx) == CONST_INT | 9913 && CONST_INT_P (new_rtx) |
9576 && merge_outer_ops (&outer_op, &outer_const, XOR, | 9914 && merge_outer_ops (&outer_op, &outer_const, XOR, |
9577 INTVAL (new_rtx), result_mode, &complement_p)) | 9915 INTVAL (new_rtx), result_mode, &complement_p)) |
9578 { | 9916 { |
9579 varop = XEXP (varop, 0); | 9917 varop = XEXP (varop, 0); |
9580 continue; | 9918 continue; |
9592 | 9930 |
9593 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) | 9931 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
9594 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT | 9932 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT |
9595 && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1) | 9933 && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1) |
9596 && (code == LSHIFTRT || code == ASHIFTRT) | 9934 && (code == LSHIFTRT || code == ASHIFTRT) |
9597 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT | 9935 && CONST_INT_P (XEXP (XEXP (varop, 0), 1)) |
9598 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count | 9936 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count |
9599 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1))) | 9937 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1))) |
9600 { | 9938 { |
9601 count = 0; | 9939 count = 0; |
9602 varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1), | 9940 varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1), |
9612 case TRUNCATE: | 9950 case TRUNCATE: |
9613 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt)) | 9951 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt)) |
9614 if the truncate does not affect the value. */ | 9952 if the truncate does not affect the value. */ |
9615 if (code == LSHIFTRT | 9953 if (code == LSHIFTRT |
9616 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT | 9954 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT |
9617 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT | 9955 && CONST_INT_P (XEXP (XEXP (varop, 0), 1)) |
9618 && (INTVAL (XEXP (XEXP (varop, 0), 1)) | 9956 && (INTVAL (XEXP (XEXP (varop, 0), 1)) |
9619 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0))) | 9957 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0))) |
9620 - GET_MODE_BITSIZE (GET_MODE (varop))))) | 9958 - GET_MODE_BITSIZE (GET_MODE (varop))))) |
9621 { | 9959 { |
9622 rtx varop_inner = XEXP (varop, 0); | 9960 rtx varop_inner = XEXP (varop, 0); |
9637 } | 9975 } |
9638 | 9976 |
9639 break; | 9977 break; |
9640 } | 9978 } |
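The TRUNCATE rewrite above, checked standalone with SImode narrowed to HImode (a sketch): the inner count 18 >= 32 - 16, so the truncation discards nothing and the two logical shifts merge.

#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint32_t x = 0xDEADBEEF;
  /* (lshiftrt (truncate (lshiftrt x 18)) 3)
     == (truncate (lshiftrt x 21))  */
  assert ((uint16_t) ((uint16_t) (x >> 18) >> 3) == (uint16_t) (x >> 21));
  return 0;
}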
9641 | 9979 |
9642 /* We need to determine what mode to do the shift in. If the shift is | 9980 shift_mode = try_widen_shift_mode (code, varop, count, result_mode, mode, |
9643 a right shift or ROTATE, we must always do it in the mode it was | 9981 outer_op, outer_const); |
9644 originally done in. Otherwise, we can do it in MODE, the widest mode | |
9645 encountered. The code we care about is that of the shift that will | |
9646 actually be done, not the shift that was originally requested. */ | |
9647 shift_mode | |
9648 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE | |
9649 ? result_mode : mode); | |
9650 | 9982 |
9651 /* We have now finished analyzing the shift. The result should be | 9983 /* We have now finished analyzing the shift. The result should be |
9652 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If | 9984 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If |
9653 OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied | 9985 OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied |
9654 to the result of the shift. OUTER_CONST is the relevant constant, | 9986 to the result of the shift. OUTER_CONST is the relevant constant, |
9852 i < XVECLEN (newpat, 0); i++) | 10184 i < XVECLEN (newpat, 0); i++) |
9853 { | 10185 { |
9854 if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)) | 10186 if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)) |
9855 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn)) | 10187 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn)) |
9856 return -1; | 10188 return -1; |
9857 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH) | 10189 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH) |
9858 { | 10190 { |
9859 gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))); | 10191 gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))); |
9860 notes = gen_rtx_EXPR_LIST (REG_UNUSED, | 10192 notes = alloc_reg_note (REG_UNUSED, |
9861 XEXP (XVECEXP (newpat, 0, i), 0), notes); | 10193 XEXP (XVECEXP (newpat, 0, i), 0), notes); |
9862 } | 10194 } |
9863 } | 10195 } |
9864 pat = newpat; | 10196 pat = newpat; |
9865 } | 10197 } |
9866 | 10198 |
9900 | 10232 |
9901 /* We can only support MODE being wider than a word if X is a | 10233 /* We can only support MODE being wider than a word if X is a |
9902 constant integer or has a mode the same size. */ | 10234 constant integer or has a mode the same size. */ |
9903 if (GET_MODE_SIZE (omode) > UNITS_PER_WORD | 10235 if (GET_MODE_SIZE (omode) > UNITS_PER_WORD |
9904 && ! ((imode == VOIDmode | 10236 && ! ((imode == VOIDmode |
9905 && (GET_CODE (x) == CONST_INT | 10237 && (CONST_INT_P (x) |
9906 || GET_CODE (x) == CONST_DOUBLE)) | 10238 || GET_CODE (x) == CONST_DOUBLE)) |
9907 || isize == osize)) | 10239 || isize == osize)) |
9908 goto fail; | 10240 goto fail; |
9909 | 10241 |
9910 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart | 10242 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart |
10018 && GET_CODE (XEXP (op1, 0)) == ASHIFT | 10350 && GET_CODE (XEXP (op1, 0)) == ASHIFT |
10019 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG | 10351 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG |
10020 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG | 10352 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG |
10021 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))) | 10353 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))) |
10022 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0)))) | 10354 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0)))) |
10023 && GET_CODE (XEXP (op0, 1)) == CONST_INT | 10355 && CONST_INT_P (XEXP (op0, 1)) |
10024 && XEXP (op0, 1) == XEXP (op1, 1) | 10356 && XEXP (op0, 1) == XEXP (op1, 1) |
10025 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1) | 10357 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1) |
10026 && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1) | 10358 && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1) |
10027 && (INTVAL (XEXP (op0, 1)) | 10359 && (INTVAL (XEXP (op0, 1)) |
10028 == (GET_MODE_BITSIZE (GET_MODE (op0)) | 10360 == (GET_MODE_BITSIZE (GET_MODE (op0)) |
10044 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT) | 10376 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT) |
10045 && (code != GT && code != LT && code != GE && code != LE)) | 10377 && (code != GT && code != LT && code != GE && code != LE)) |
10046 || (GET_CODE (op0) == ASHIFTRT | 10378 || (GET_CODE (op0) == ASHIFTRT |
10047 && (code != GTU && code != LTU | 10379 && (code != GTU && code != LTU |
10048 && code != GEU && code != LEU))) | 10380 && code != GEU && code != LEU))) |
10049 && GET_CODE (XEXP (op0, 1)) == CONST_INT | 10381 && CONST_INT_P (XEXP (op0, 1)) |
10050 && INTVAL (XEXP (op0, 1)) >= 0 | 10382 && INTVAL (XEXP (op0, 1)) >= 0 |
10051 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT | 10383 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT |
10052 && XEXP (op0, 1) == XEXP (op1, 1)) | 10384 && XEXP (op0, 1) == XEXP (op1, 1)) |
10053 { | 10385 { |
10054 enum machine_mode mode = GET_MODE (op0); | 10386 enum machine_mode mode = GET_MODE (op0); |
10079 Similarly, check for a case where the AND's are ZERO_EXTEND | 10411 Similarly, check for a case where the AND's are ZERO_EXTEND |
10080 operations from some narrower mode even though a SUBREG is not | 10412 operations from some narrower mode even though a SUBREG is not |
10081 present. */ | 10413 present. */ |
10082 | 10414 |
10083 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND | 10415 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND |
10084 && GET_CODE (XEXP (op0, 1)) == CONST_INT | 10416 && CONST_INT_P (XEXP (op0, 1)) |
10085 && GET_CODE (XEXP (op1, 1)) == CONST_INT) | 10417 && CONST_INT_P (XEXP (op1, 1))) |
10086 { | 10418 { |
10087 rtx inner_op0 = XEXP (op0, 0); | 10419 rtx inner_op0 = XEXP (op0, 0); |
10088 rtx inner_op1 = XEXP (op1, 0); | 10420 rtx inner_op1 = XEXP (op1, 0); |
10089 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1)); | 10421 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1)); |
10090 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1)); | 10422 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1)); |
10153 /* We now enter a loop during which we will try to simplify the comparison. | 10485 /* We now enter a loop during which we will try to simplify the comparison. |
10154 For the most part, we only are concerned with comparisons with zero, | 10486 For the most part, we only are concerned with comparisons with zero, |
10155 but some things may really be comparisons with zero but not start | 10487 but some things may really be comparisons with zero but not start |
10156 out looking that way. */ | 10488 out looking that way. */ |
10157 | 10489 |
10158 while (GET_CODE (op1) == CONST_INT) | 10490 while (CONST_INT_P (op1)) |
10159 { | 10491 { |
10160 enum machine_mode mode = GET_MODE (op0); | 10492 enum machine_mode mode = GET_MODE (op0); |
10161 unsigned int mode_width = GET_MODE_BITSIZE (mode); | 10493 unsigned int mode_width = GET_MODE_BITSIZE (mode); |
10162 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); | 10494 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
10163 int equality_comparison_p; | 10495 int equality_comparison_p; |
10377 with zero, we can convert this into an equality comparison | 10709 with zero, we can convert this into an equality comparison |
10378 between the position and the location of the single bit. */ | 10710 between the position and the location of the single bit. */ |
10379 /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might | 10711 /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might |
10380 have already reduced the shift count modulo the word size. */ | 10712 have already reduced the shift count modulo the word size. */ |
10381 if (!SHIFT_COUNT_TRUNCATED | 10713 if (!SHIFT_COUNT_TRUNCATED |
10382 && GET_CODE (XEXP (op0, 0)) == CONST_INT | 10714 && CONST_INT_P (XEXP (op0, 0)) |
10383 && XEXP (op0, 1) == const1_rtx | 10715 && XEXP (op0, 1) == const1_rtx |
10384 && equality_comparison_p && const_op == 0 | 10716 && equality_comparison_p && const_op == 0 |
10385 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0) | 10717 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0) |
10386 { | 10718 { |
10387 if (BITS_BIG_ENDIAN) | 10719 if (BITS_BIG_ENDIAN) |
10476 break; | 10808 break; |
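The single-bit test above, made concrete in QImode (a sketch, leaving the SHIFT_COUNT_TRUNCATED caveat aside): a shifted power of two compares equal to zero exactly when its bit position plus the count reaches the mode width.

#include <assert.h>
#include <stdint.h>

int main (void)
{
  /* (eq (ashift (const_int 4) n) 0) in QImode: bit 2 is shifted out
     exactly when n >= 8 - 2, so the test becomes one on n itself.  */
  for (unsigned n = 0; n < 8; n++)
    assert (((uint8_t) (4u << n) == 0) == (n >= 6));
  return 0;
}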
10477 | 10809 |
10478 case ROTATE: | 10810 case ROTATE: |
10479 /* If we are testing equality and our count is a constant, we | 10811 /* If we are testing equality and our count is a constant, we |
10480 can perform the inverse operation on our RHS. */ | 10812 can perform the inverse operation on our RHS. */ |
10481 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT | 10813 if (equality_comparison_p && CONST_INT_P (XEXP (op0, 1)) |
10482 && (tem = simplify_binary_operation (ROTATERT, mode, | 10814 && (tem = simplify_binary_operation (ROTATERT, mode, |
10483 op1, XEXP (op0, 1))) != 0) | 10815 op1, XEXP (op0, 1))) != 0) |
10484 { | 10816 { |
10485 op0 = XEXP (op0, 0); | 10817 op0 = XEXP (op0, 0); |
10486 op1 = tem; | 10818 op1 = tem; |
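The inverse-rotation trick, checked standalone (rotl/rotr are local helpers): rotation is invertible, so rotating the constant RHS the opposite way preserves the equality.

#include <assert.h>
#include <stdint.h>

static uint32_t rotl (uint32_t v, unsigned n)
{ return (v << (n & 31)) | (v >> ((32 - n) & 31)); }
static uint32_t rotr (uint32_t v, unsigned n)
{ return (v >> (n & 31)) | (v << ((32 - n) & 31)); }

int main (void)
{
  uint32_t x = 0xCAFEF00D;
  uint32_t k = rotl (x, 13);
  /* (eq (rotate x 13) k) == (eq x (rotatert k 13))  */
  assert (x == rotr (k, 13));
  assert ((rotl (x, 13) == k + 1) == (x == rotr (k + 1, 13)));
  return 0;
}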
10489 | 10821 |
10490 /* If we are doing a < 0 or >= 0 comparison, it means we are testing | 10822 /* If we are doing a < 0 or >= 0 comparison, it means we are testing |
10491 a particular bit. Convert it to an AND of a constant of that | 10823 a particular bit. Convert it to an AND of a constant of that |
10492 bit. This will be converted into a ZERO_EXTRACT. */ | 10824 bit. This will be converted into a ZERO_EXTRACT. */ |
10493 if (const_op == 0 && sign_bit_comparison_p | 10825 if (const_op == 0 && sign_bit_comparison_p |
10494 && GET_CODE (XEXP (op0, 1)) == CONST_INT | 10826 && CONST_INT_P (XEXP (op0, 1)) |
10495 && mode_width <= HOST_BITS_PER_WIDE_INT) | 10827 && mode_width <= HOST_BITS_PER_WIDE_INT) |
10496 { | 10828 { |
10497 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), | 10829 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
10498 ((HOST_WIDE_INT) 1 | 10830 ((HOST_WIDE_INT) 1 |
10499 << (mode_width - 1 | 10831 << (mode_width - 1 |
10527 && ! unsigned_comparison_p | 10859 && ! unsigned_comparison_p |
10528 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | 10860 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) |
10529 && ((unsigned HOST_WIDE_INT) const_op | 10861 && ((unsigned HOST_WIDE_INT) const_op |
10530 < (((unsigned HOST_WIDE_INT) 1 | 10862 < (((unsigned HOST_WIDE_INT) 1 |
10531 << (GET_MODE_BITSIZE (mode) - 1)))) | 10863 << (GET_MODE_BITSIZE (mode) - 1)))) |
10532 && optab_handler (cmp_optab, mode)->insn_code != CODE_FOR_nothing) | 10864 && have_insn_for (COMPARE, mode)) |
10533 { | 10865 { |
10534 op0 = XEXP (op0, 0); | 10866 op0 = XEXP (op0, 0); |
10535 continue; | 10867 continue; |
10536 } | 10868 } |
10537 break; | 10869 break; |
10559 | 10891 |
10560 if (mode_width <= HOST_BITS_PER_WIDE_INT | 10892 if (mode_width <= HOST_BITS_PER_WIDE_INT |
10561 && subreg_lowpart_p (op0) | 10893 && subreg_lowpart_p (op0) |
10562 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width | 10894 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width |
10563 && GET_CODE (SUBREG_REG (op0)) == PLUS | 10895 && GET_CODE (SUBREG_REG (op0)) == PLUS |
10564 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT) | 10896 && CONST_INT_P (XEXP (SUBREG_REG (op0), 1))) |
10565 { | 10897 { |
10566 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0)); | 10898 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0)); |
10567 rtx a = XEXP (SUBREG_REG (op0), 0); | 10899 rtx a = XEXP (SUBREG_REG (op0), 0); |
10568 HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1)); | 10900 HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1)); |
10569 | 10901 |
10608 mode = GET_MODE (XEXP (op0, 0)); | 10940 mode = GET_MODE (XEXP (op0, 0)); |
10609 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT | 10941 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT |
10610 && (unsigned_comparison_p || equality_comparison_p) | 10942 && (unsigned_comparison_p || equality_comparison_p) |
10611 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | 10943 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) |
10612 && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode)) | 10944 && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode)) |
10613 && optab_handler (cmp_optab, mode)->insn_code != CODE_FOR_nothing) | 10945 && have_insn_for (COMPARE, mode)) |
10614 { | 10946 { |
10615 op0 = XEXP (op0, 0); | 10947 op0 = XEXP (op0, 0); |
10616 continue; | 10948 continue; |
10617 } | 10949 } |
10618 break; | 10950 break; |
10669 } | 11001 } |
10670 | 11002 |
10671 /* The sign bit of (minus (ashiftrt X C) X), where C is the number | 11003 /* The sign bit of (minus (ashiftrt X C) X), where C is the number |
10672 of bits in X minus 1, is one iff X > 0. */ | 11004 of bits in X minus 1, is one iff X > 0. */ |
10673 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT | 11005 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT |
10674 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | 11006 && CONST_INT_P (XEXP (XEXP (op0, 0), 1)) |
10675 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1)) | 11007 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1)) |
10676 == mode_width - 1 | 11008 == mode_width - 1 |
10677 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1))) | 11009 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1))) |
10678 { | 11010 { |
10679 op0 = XEXP (op0, 1); | 11011 op0 = XEXP (op0, 1); |
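The claim, checked standalone (assuming an arithmetic right shift for signed values, which is what ASHIFTRT denotes):

#include <assert.h>
#include <stdint.h>

int main (void)
{
  int32_t xs[] = { INT32_MIN, -7, -1, 0, 1, 42, INT32_MAX };
  for (unsigned i = 0; i < sizeof xs / sizeof xs[0]; i++)
    {
      int32_t x = xs[i];
      /* x >> 31 is -1 for negative x and 0 otherwise, so the MINUS
         is negative exactly when x > 0.  */
      assert ((((x >> 31) - x) < 0) == (x > 0));
    }
  return 0;
}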
10770 /* If we are comparing (and (lshiftrt X C1) C2) for equality with | 11102 /* If we are comparing (and (lshiftrt X C1) C2) for equality with |
10771 zero and X is a comparison and C1 and C2 describe only bits set | 11103 zero and X is a comparison and C1 and C2 describe only bits set |
10772 in STORE_FLAG_VALUE, we can compare with X. */ | 11104 in STORE_FLAG_VALUE, we can compare with X. */ |
10773 if (const_op == 0 && equality_comparison_p | 11105 if (const_op == 0 && equality_comparison_p |
10774 && mode_width <= HOST_BITS_PER_WIDE_INT | 11106 && mode_width <= HOST_BITS_PER_WIDE_INT |
10775 && GET_CODE (XEXP (op0, 1)) == CONST_INT | 11107 && CONST_INT_P (XEXP (op0, 1)) |
10776 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT | 11108 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT |
10777 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | 11109 && CONST_INT_P (XEXP (XEXP (op0, 0), 1)) |
10778 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0 | 11110 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0 |
10779 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT) | 11111 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT) |
10780 { | 11112 { |
10781 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) | 11113 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) |
10782 << INTVAL (XEXP (XEXP (op0, 0), 1))); | 11114 << INTVAL (XEXP (XEXP (op0, 0), 1))); |
10793 /* If we are doing an equality comparison of an AND of a bit equal | 11125 /* If we are doing an equality comparison of an AND of a bit equal |
10794 to the sign bit, replace this with a LT or GE comparison of | 11126 to the sign bit, replace this with a LT or GE comparison of |
10795 the underlying value. */ | 11127 the underlying value. */ |
10796 if (equality_comparison_p | 11128 if (equality_comparison_p |
10797 && const_op == 0 | 11129 && const_op == 0 |
10798 && GET_CODE (XEXP (op0, 1)) == CONST_INT | 11130 && CONST_INT_P (XEXP (op0, 1)) |
10799 && mode_width <= HOST_BITS_PER_WIDE_INT | 11131 && mode_width <= HOST_BITS_PER_WIDE_INT |
10800 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) | 11132 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) |
10801 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) | 11133 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) |
10802 { | 11134 { |
10803 op0 = XEXP (op0, 0); | 11135 op0 = XEXP (op0, 0); |
10817 | 11149 |
10818 unless TRULY_NOOP_TRUNCATION allows it or the register is | 11150 unless TRULY_NOOP_TRUNCATION allows it or the register is |
10819 known to hold a value of the required mode, the | 11151 known to hold a value of the required mode, the |
10820 transformation is invalid. */ | 11152 transformation is invalid. */ |
10821 if ((equality_comparison_p || unsigned_comparison_p) | 11153 if ((equality_comparison_p || unsigned_comparison_p) |
10822 && GET_CODE (XEXP (op0, 1)) == CONST_INT | 11154 && CONST_INT_P (XEXP (op0, 1)) |
10823 && (i = exact_log2 ((INTVAL (XEXP (op0, 1)) | 11155 && (i = exact_log2 ((INTVAL (XEXP (op0, 1)) |
10824 & GET_MODE_MASK (mode)) | 11156 & GET_MODE_MASK (mode)) |
10825 + 1)) >= 0 | 11157 + 1)) >= 0 |
10826 && const_op >> i == 0 | 11158 && const_op >> i == 0 |
10827 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode | 11159 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode |
10857 || (mode_width > GET_MODE_BITSIZE (tmode) | 11189 || (mode_width > GET_MODE_BITSIZE (tmode) |
10858 && mode_width <= BITS_PER_WORD) | 11190 && mode_width <= BITS_PER_WORD) |
10859 #endif | 11191 #endif |
10860 || (mode_width <= GET_MODE_BITSIZE (tmode) | 11192 || (mode_width <= GET_MODE_BITSIZE (tmode) |
10861 && subreg_lowpart_p (XEXP (op0, 0)))) | 11193 && subreg_lowpart_p (XEXP (op0, 0)))) |
10862 && GET_CODE (XEXP (op0, 1)) == CONST_INT | 11194 && CONST_INT_P (XEXP (op0, 1)) |
10863 && mode_width <= HOST_BITS_PER_WIDE_INT | 11195 && mode_width <= HOST_BITS_PER_WIDE_INT |
10864 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT | 11196 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT |
10865 && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0 | 11197 && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0 |
10866 && (c1 & ~GET_MODE_MASK (tmode)) == 0 | 11198 && (c1 & ~GET_MODE_MASK (tmode)) == 0 |
10867 && c1 != mask | 11199 && c1 != mask |
10896 rtx shift_op = XEXP (XEXP (op0, 0), 0); | 11228 rtx shift_op = XEXP (XEXP (op0, 0), 0); |
10897 rtx shift_count = XEXP (XEXP (op0, 0), 1); | 11229 rtx shift_count = XEXP (XEXP (op0, 0), 1); |
10898 | 11230 |
10899 if (GET_CODE (shift_op) == NOT | 11231 if (GET_CODE (shift_op) == NOT |
10900 || (GET_CODE (shift_op) == XOR | 11232 || (GET_CODE (shift_op) == XOR |
10901 && GET_CODE (XEXP (shift_op, 1)) == CONST_INT | 11233 && CONST_INT_P (XEXP (shift_op, 1)) |
10902 && GET_CODE (shift_count) == CONST_INT | 11234 && CONST_INT_P (shift_count) |
10903 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | 11235 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
10904 && (INTVAL (XEXP (shift_op, 1)) | 11236 && (INTVAL (XEXP (shift_op, 1)) |
10905 == (HOST_WIDE_INT) 1 << INTVAL (shift_count)))) | 11237 == (HOST_WIDE_INT) 1 << INTVAL (shift_count)))) |
10906 { | 11238 { |
10907 op0 = simplify_and_const_int | 11239 op0 = simplify_and_const_int |
10918 /* If we have (compare (ashift FOO N) (const_int C)) and | 11250 /* If we have (compare (ashift FOO N) (const_int C)) and |
10919 the high order N bits of FOO (N+1 if an inequality comparison) | 11251 the high order N bits of FOO (N+1 if an inequality comparison) |
10920 are known to be zero, we can do this by comparing FOO with C | 11252 are known to be zero, we can do this by comparing FOO with C |
10921 shifted right N bits so long as the low-order N bits of C are | 11253 shifted right N bits so long as the low-order N bits of C are |
10922 zero. */ | 11254 zero. */ |
10923 if (GET_CODE (XEXP (op0, 1)) == CONST_INT | 11255 if (CONST_INT_P (XEXP (op0, 1)) |
10924 && INTVAL (XEXP (op0, 1)) >= 0 | 11256 && INTVAL (XEXP (op0, 1)) >= 0 |
10925 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p) | 11257 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p) |
10926 < HOST_BITS_PER_WIDE_INT) | 11258 < HOST_BITS_PER_WIDE_INT) |
10927 && ((const_op | 11259 && ((const_op |
10928 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0) | 11260 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0) |
10941 continue; | 11273 continue; |
10942 } | 11274 } |
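The ASHIFT comparison rewrite, checked standalone (a sketch; the high 8 bits of FOO are known zero and the low 8 bits of C are zero, so neither side loses information):

#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint32_t c = 0x12345600;        /* low-order 8 bits zero  */
  uint32_t hit = 0x00123456;      /* high 8 bits zero       */
  uint32_t miss = 0x00BEEF42;
  /* (compare (ashift foo 8) c) becomes (compare foo (lshiftrt c 8)).  */
  assert (((hit << 8) == c) == (hit == (c >> 8)));
  assert (((miss << 8) == c) == (miss == (c >> 8)));
  return 0;
}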
10943 | 11275 |
10944 /* If we are doing a sign bit comparison, it means we are testing | 11276 /* If we are doing a sign bit comparison, it means we are testing |
10945 a particular bit. Convert it to the appropriate AND. */ | 11277 a particular bit. Convert it to the appropriate AND. */ |
10946 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT | 11278 if (sign_bit_comparison_p && CONST_INT_P (XEXP (op0, 1)) |
10947 && mode_width <= HOST_BITS_PER_WIDE_INT) | 11279 && mode_width <= HOST_BITS_PER_WIDE_INT) |
10948 { | 11280 { |
10949 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), | 11281 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
10950 ((HOST_WIDE_INT) 1 | 11282 ((HOST_WIDE_INT) 1 |
10951 << (mode_width - 1 | 11283 << (mode_width - 1 |
10956 | 11288 |
10957 /* If this is an equality comparison with zero and we are shifting | 11289 /* If this is an equality comparison with zero and we are shifting |
10958 the low bit to the sign bit, we can convert this to an AND of the | 11290 the low bit to the sign bit, we can convert this to an AND of the |
10959 low-order bit. */ | 11291 low-order bit. */ |
10960 if (const_op == 0 && equality_comparison_p | 11292 if (const_op == 0 && equality_comparison_p |
10961 && GET_CODE (XEXP (op0, 1)) == CONST_INT | 11293 && CONST_INT_P (XEXP (op0, 1)) |
10962 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) | 11294 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) |
10963 == mode_width - 1) | 11295 == mode_width - 1) |
10964 { | 11296 { |
10965 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), | 11297 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
10966 (HOST_WIDE_INT) 1); | 11298 (HOST_WIDE_INT) 1); |
10970 | 11302 |
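The low-bit conversion, checked standalone in 32-bit arithmetic:

#include <assert.h>
#include <stdint.h>

int main (void)
{
  for (uint32_t x = 0; x < 16; x++)
    /* (eq (ashift x 31) 0) depends only on the low-order bit of x.  */
    assert (((x << 31) == 0) == ((x & 1) == 0));
  return 0;
}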
10971 case ASHIFTRT: | 11303 case ASHIFTRT: |
10972 /* If this is an equality comparison with zero, we can do this | 11304 /* If this is an equality comparison with zero, we can do this |
10973 as a logical shift, which might be much simpler. */ | 11305 as a logical shift, which might be much simpler. */ |
10974 if (equality_comparison_p && const_op == 0 | 11306 if (equality_comparison_p && const_op == 0 |
10975 && GET_CODE (XEXP (op0, 1)) == CONST_INT) | 11307 && CONST_INT_P (XEXP (op0, 1))) |
10976 { | 11308 { |
10977 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, | 11309 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, |
10978 XEXP (op0, 0), | 11310 XEXP (op0, 0), |
10979 INTVAL (XEXP (op0, 1))); | 11311 INTVAL (XEXP (op0, 1))); |
10980 continue; | 11312 continue; |
10981 } | 11313 } |
10982 | 11314 |
10983 /* If OP0 is a sign extension and CODE is not an unsigned comparison, | 11315 /* If OP0 is a sign extension and CODE is not an unsigned comparison, |
10984 do the comparison in a narrower mode. */ | 11316 do the comparison in a narrower mode. */ |
10985 if (! unsigned_comparison_p | 11317 if (! unsigned_comparison_p |
10986 && GET_CODE (XEXP (op0, 1)) == CONST_INT | 11318 && CONST_INT_P (XEXP (op0, 1)) |
10987 && GET_CODE (XEXP (op0, 0)) == ASHIFT | 11319 && GET_CODE (XEXP (op0, 0)) == ASHIFT |
10988 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1) | 11320 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1) |
10989 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), | 11321 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), |
10990 MODE_INT, 1)) != BLKmode | 11322 MODE_INT, 1)) != BLKmode |
10991 && (((unsigned HOST_WIDE_INT) const_op | 11323 && (((unsigned HOST_WIDE_INT) const_op |
10998 | 11330 |
10999 /* Likewise if OP0 is a PLUS of a sign extension with a | 11331 /* Likewise if OP0 is a PLUS of a sign extension with a |
11000 constant, which is usually represented with the PLUS | 11332 constant, which is usually represented with the PLUS |
11001 between the shifts. */ | 11333 between the shifts. */ |
11002 if (! unsigned_comparison_p | 11334 if (! unsigned_comparison_p |
11003 && GET_CODE (XEXP (op0, 1)) == CONST_INT | 11335 && CONST_INT_P (XEXP (op0, 1)) |
11004 && GET_CODE (XEXP (op0, 0)) == PLUS | 11336 && GET_CODE (XEXP (op0, 0)) == PLUS |
11005 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | 11337 && CONST_INT_P (XEXP (XEXP (op0, 0), 1)) |
11006 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT | 11338 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT |
11007 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1) | 11339 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1) |
11008 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), | 11340 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), |
11009 MODE_INT, 1)) != BLKmode | 11341 MODE_INT, 1)) != BLKmode |
11010 && (((unsigned HOST_WIDE_INT) const_op | 11342 && (((unsigned HOST_WIDE_INT) const_op |
11026 case LSHIFTRT: | 11358 case LSHIFTRT: |
11027 /* If we have (compare (xshiftrt FOO N) (const_int C)) and | 11359 /* If we have (compare (xshiftrt FOO N) (const_int C)) and |
11028 the low order N bits of FOO are known to be zero, we can do this | 11360 the low order N bits of FOO are known to be zero, we can do this |
11029 by comparing FOO with C shifted left N bits so long as no | 11361 by comparing FOO with C shifted left N bits so long as no |
11030 overflow occurs. */ | 11362 overflow occurs. */ |
11031 if (GET_CODE (XEXP (op0, 1)) == CONST_INT | 11363 if (CONST_INT_P (XEXP (op0, 1)) |
11032 && INTVAL (XEXP (op0, 1)) >= 0 | 11364 && INTVAL (XEXP (op0, 1)) >= 0 |
11033 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT | 11365 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT |
11034 && mode_width <= HOST_BITS_PER_WIDE_INT | 11366 && mode_width <= HOST_BITS_PER_WIDE_INT |
11035 && (nonzero_bits (XEXP (op0, 0), mode) | 11367 && (nonzero_bits (XEXP (op0, 0), mode) |
11036 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0 | 11368 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0 |
11054 | 11386 |
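The mirror-image rewrite for the right shift, checked standalone (low 8 bits of FOO known zero, and C << 8 does not overflow):

#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint32_t foo = 0xBEEF4200;      /* low-order 8 bits zero  */
  uint32_t hit = 0x00BEEF42, miss = 0x00123456;
  /* (compare (lshiftrt foo 8) c) becomes (compare foo (ashift c 8)).  */
  assert (((foo >> 8) == hit) == (foo == (hit << 8)));
  assert (((foo >> 8) == miss) == (foo == (miss << 8)));
  return 0;
}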
11055 /* If we are using this shift to extract just the sign bit, we | 11387 /* If we are using this shift to extract just the sign bit, we |
11056 can replace this with an LT or GE comparison. */ | 11388 can replace this with an LT or GE comparison. */ |
11057 if (const_op == 0 | 11389 if (const_op == 0 |
11058 && (equality_comparison_p || sign_bit_comparison_p) | 11390 && (equality_comparison_p || sign_bit_comparison_p) |
11059 && GET_CODE (XEXP (op0, 1)) == CONST_INT | 11391 && CONST_INT_P (XEXP (op0, 1)) |
11060 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) | 11392 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) |
11061 == mode_width - 1) | 11393 == mode_width - 1) |
11062 { | 11394 { |
11063 op0 = XEXP (op0, 0); | 11395 op0 = XEXP (op0, 0); |
11064 code = (code == NE || code == GT ? LT : GE); | 11396 code = (code == NE || code == GT ? LT : GE); |
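The replacement, checked standalone: a logical right shift by width - 1 yields exactly the sign bit, so the test is just signed x < 0.

#include <assert.h>
#include <stdint.h>

int main (void)
{
  int32_t xs[] = { -9, -1, 0, 1, 9 };
  for (unsigned i = 0; i < 5; i++)
    {
      uint32_t u = (uint32_t) xs[i];
      /* (ne (lshiftrt x 31) 0) is (lt x 0).  */
      assert (((u >> 31) != 0) == (xs[i] < 0));
    }
  return 0;
}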
11144 tmode = GET_MODE_WIDER_MODE (tmode)) | 11476 tmode = GET_MODE_WIDER_MODE (tmode)) |
11145 if (have_insn_for (COMPARE, tmode)) | 11477 if (have_insn_for (COMPARE, tmode)) |
11146 { | 11478 { |
11147 int zero_extended; | 11479 int zero_extended; |
11148 | 11480 |
11481 /* If this is a test for negative, we can make an explicit | |
11482 test of the sign bit. Test this first so we can use | |
11483 a paradoxical subreg to extend OP0. */ | |
11484 | |
11485 if (op1 == const0_rtx && (code == LT || code == GE) | |
11486 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
11487 { | |
11488 op0 = simplify_gen_binary (AND, tmode, | |
11489 gen_lowpart (tmode, op0), | |
11490 GEN_INT ((HOST_WIDE_INT) 1 | |
11491 << (GET_MODE_BITSIZE (mode) | |
11492 - 1))); | |
11493 code = (code == LT) ? NE : EQ; | |
11494 break; | |
11495 } | |
11496 | |
11149 /* If the only nonzero bits in OP0 and OP1 are those in the | 11497 /* If the only nonzero bits in OP0 and OP1 are those in the |
11150 narrower mode and this is an equality or unsigned comparison, | 11498 narrower mode and this is an equality or unsigned comparison, |
11151 we can use the wider mode. Similarly for sign-extended | 11499 we can use the wider mode. Similarly for sign-extended |
11152 values, in which case it is true for all comparisons. */ | 11500 values, in which case it is true for all comparisons. */ |
11153 zero_extended = ((code == EQ || code == NE | 11501 zero_extended = ((code == EQ || code == NE |
11154 || code == GEU || code == GTU | 11502 || code == GEU || code == GTU |
11155 || code == LEU || code == LTU) | 11503 || code == LEU || code == LTU) |
11156 && (nonzero_bits (op0, tmode) | 11504 && (nonzero_bits (op0, tmode) |
11157 & ~GET_MODE_MASK (mode)) == 0 | 11505 & ~GET_MODE_MASK (mode)) == 0 |
11158 && ((GET_CODE (op1) == CONST_INT | 11506 && ((CONST_INT_P (op1) |
11159 || (nonzero_bits (op1, tmode) | 11507 || (nonzero_bits (op1, tmode) |
11160 & ~GET_MODE_MASK (mode)) == 0))); | 11508 & ~GET_MODE_MASK (mode)) == 0))); |
11161 | 11509 |
11162 if (zero_extended | 11510 if (zero_extended |
11163 || ((num_sign_bit_copies (op0, tmode) | 11511 || ((num_sign_bit_copies (op0, tmode) |
11174 op0 = simplify_gen_binary (AND, tmode, | 11522 op0 = simplify_gen_binary (AND, tmode, |
11175 gen_lowpart (tmode, | 11523 gen_lowpart (tmode, |
11176 XEXP (op0, 0)), | 11524 XEXP (op0, 0)), |
11177 gen_lowpart (tmode, | 11525 gen_lowpart (tmode, |
11178 XEXP (op0, 1))); | 11526 XEXP (op0, 1))); |
11179 | 11527 else |
11180 op0 = gen_lowpart (tmode, op0); | 11528 { |
11181 if (zero_extended && GET_CODE (op1) == CONST_INT) | 11529 if (zero_extended) |
11182 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode)); | 11530 { |
11183 op1 = gen_lowpart (tmode, op1); | 11531 op0 = simplify_gen_unary (ZERO_EXTEND, tmode, op0, mode); |
11184 break; | 11532 op1 = simplify_gen_unary (ZERO_EXTEND, tmode, op1, mode); |
11185 } | 11533 } |
11186 | 11534 else |
11187 /* If this is a test for negative, we can make an explicit | 11535 { |
11188 test of the sign bit. */ | 11536 op0 = simplify_gen_unary (SIGN_EXTEND, tmode, op0, mode); |
11189 | 11537 op1 = simplify_gen_unary (SIGN_EXTEND, tmode, op1, mode); |
11190 if (op1 == const0_rtx && (code == LT || code == GE) | 11538 } |
11191 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | 11539 break; |
11192 { | 11540 } |
11193 op0 = simplify_gen_binary (AND, tmode, | |
11194 gen_lowpart (tmode, op0), | |
11195 GEN_INT ((HOST_WIDE_INT) 1 | |
11196 << (GET_MODE_BITSIZE (mode) | |
11197 - 1))); | |
11198 code = (code == LT) ? NE : EQ; | |
11199 break; | |
11200 } | 11541 } |
11201 } | 11542 } |
11202 | 11543 |
11203 #ifdef CANONICALIZE_COMPARISON | 11544 #ifdef CANONICALIZE_COMPARISON |
11204 /* If this machine only supports a subset of valid comparisons, see if we | 11545 /* If this machine only supports a subset of valid comparisons, see if we |
11381 | 11722 |
11382 if (insn) | 11723 if (insn) |
11383 rsp->last_set = insn; | 11724 rsp->last_set = insn; |
11384 | 11725 |
11385 rsp->last_set_value = 0; | 11726 rsp->last_set_value = 0; |
11386 rsp->last_set_mode = 0; | 11727 rsp->last_set_mode = VOIDmode; |
11387 rsp->last_set_nonzero_bits = 0; | 11728 rsp->last_set_nonzero_bits = 0; |
11388 rsp->last_set_sign_bit_copies = 0; | 11729 rsp->last_set_sign_bit_copies = 0; |
11389 rsp->last_death = 0; | 11730 rsp->last_death = 0; |
11390 rsp->truncated_to_mode = 0; | 11731 rsp->truncated_to_mode = VOIDmode; |
11391 } | 11732 } |
11392 | 11733 |
11393 /* Mark registers that are being referenced in this value. */ | 11734 /* Mark registers that are being referenced in this value. */ |
11394 if (value) | 11735 if (value) |
11395 update_table_tick (value); | 11736 update_table_tick (value); |
11414 | 11755 |
11415 /* The value being assigned might refer to X (like in "x++;"). In that | 11756 /* The value being assigned might refer to X (like in "x++;"). In that |
11416 case, we must replace it with (clobber (const_int 0)) to prevent | 11757 case, we must replace it with (clobber (const_int 0)) to prevent |
11417 infinite loops. */ | 11758 infinite loops. */ |
11418 rsp = VEC_index (reg_stat_type, reg_stat, regno); | 11759 rsp = VEC_index (reg_stat_type, reg_stat, regno); |
11419 if (value && ! get_last_value_validate (&value, insn, | 11760 if (value && !get_last_value_validate (&value, insn, label_tick, 0)) |
11420 rsp->last_set_label, 0)) | |
11421 { | 11761 { |
11422 value = copy_rtx (value); | 11762 value = copy_rtx (value); |
11423 if (! get_last_value_validate (&value, insn, | 11763 if (!get_last_value_validate (&value, insn, label_tick, 1)) |
11424 rsp->last_set_label, 1)) | |
11425 value = 0; | 11764 value = 0; |
11426 } | 11765 } |
11427 | 11766 |
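The pattern above, check read-only, then copy and patch, then give up, recurs in get_last_value further down. A standalone sketch of the idiom on a toy expression tree, where NULLing a subtree stands in for the (clobber (const_int 0)) substitution; all names here are illustrative, not GCC API:

    #include <assert.h>
    #include <stddef.h>

    struct expr { int stale; struct expr *kid0, *kid1; };

    /* Return 1 if *LOC contains no stale nodes.  With REPLACE nonzero,
       patch stale nodes out of the tree and return 1.  */
    static int
    validate (struct expr **loc, int replace)
    {
      struct expr *x = *loc;

      if (x == NULL)
        return 1;
      if (x->stale)
        {
          if (replace)
            *loc = NULL;          /* Stand-in for (clobber (const_int 0)).  */
          return replace;
        }
      return validate (&x->kid0, replace) && validate (&x->kid1, replace);
    }

    int
    main (void)
    {
      struct expr bad = { 1, NULL, NULL };
      struct expr top = { 0, &bad, NULL };
      struct expr *value = &top;

      /* The real code copies first (copy_rtx) so shared rtl is never
         modified; this sketch patches directly for brevity.  */
      if (!validate (&value, 0))
        assert (validate (&value, 1));
      assert (top.kid0 == NULL);    /* The stale subtree was clobbered.  */
      return 0;
    }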
11428 /* For the main register being modified, update the value, the mode, the | 11767 /* For the main register being modified, update the value, the mode, the |
11429 nonzero bits, and the number of sign bit copies. */ | 11768 nonzero bits, and the number of sign bit copies. */ |
11532 | 11871 |
11533 rsp = VEC_index (reg_stat_type, reg_stat, i); | 11872 rsp = VEC_index (reg_stat_type, reg_stat, i); |
11534 rsp->last_set_invalid = 1; | 11873 rsp->last_set_invalid = 1; |
11535 rsp->last_set = insn; | 11874 rsp->last_set = insn; |
11536 rsp->last_set_value = 0; | 11875 rsp->last_set_value = 0; |
11537 rsp->last_set_mode = 0; | 11876 rsp->last_set_mode = VOIDmode; |
11538 rsp->last_set_nonzero_bits = 0; | 11877 rsp->last_set_nonzero_bits = 0; |
11539 rsp->last_set_sign_bit_copies = 0; | 11878 rsp->last_set_sign_bit_copies = 0; |
11540 rsp->last_death = 0; | 11879 rsp->last_death = 0; |
11541 rsp->truncated_to_mode = 0; | 11880 rsp->truncated_to_mode = VOIDmode; |
11542 } | 11881 } |
11543 | 11882 |
11544 last_call_luid = mem_last_set = DF_INSN_LUID (insn); | 11883 last_call_luid = mem_last_set = DF_INSN_LUID (insn); |
11545 | 11884 |
11546 /* We can't combine into a call pattern. Remember, though, that | 11885 /* We can't combine into a call pattern. Remember, though, that |
11711 break; | 12050 break; |
11712 } | 12051 } |
11713 } | 12052 } |
11714 } | 12053 } |
11715 | 12054 |
11716 /* Utility routine for the following function. Verify that all the registers | 12055 /* Verify that all the registers and memory references mentioned in *LOC are |
11717 mentioned in *LOC are valid when *LOC was part of a value set when | 12056 still valid. *LOC was part of a value set in INSN when label_tick was |
11718 label_tick == TICK. Return 0 if some are not. | 12057 equal to TICK. Return 0 if some are not. If REPLACE is nonzero, replace |
11719 | 12058 the invalid references with (clobber (const_int 0)) and return 1. This |
11720 If REPLACE is nonzero, replace the invalid reference with | 12059 replacement is useful because we often can get useful information about |
11721 (clobber (const_int 0)) and return 1. This replacement is useful because | 12060 the form of a value (e.g., if it was produced by a shift that always |
11722 we often can get useful information about the form of a value (e.g., if | 12061 produces -1 or 0) even though we don't know exactly what registers it |
11723 it was produced by a shift that always produces -1 or 0) even though | 12062 was produced from. */ |
11724 we don't know exactly what registers it was produced from. */ | |
11725 | 12063 |
11726 static int | 12064 static int |
11727 get_last_value_validate (rtx *loc, rtx insn, int tick, int replace) | 12065 get_last_value_validate (rtx *loc, rtx insn, int tick, int replace) |
11728 { | 12066 { |
11729 rtx x = *loc; | 12067 rtx x = *loc; |
11755 } | 12093 } |
11756 } | 12094 } |
11757 | 12095 |
11758 return 1; | 12096 return 1; |
11759 } | 12097 } |
11760 /* If this is a memory reference, make sure that there were | 12098 /* If this is a memory reference, make sure that there were no stores after |
11761 no stores after it that might have clobbered the value. We don't | 12099 it that might have clobbered the value. We don't have alias info, so we |
11762 have alias info, so we assume any store invalidates it. */ | 12100 assume any store invalidates it. Moreover, we only have local UIDs, so |
12101 we also assume that there were stores in the intervening basic blocks. */ | |
11763 else if (MEM_P (x) && !MEM_READONLY_P (x) | 12102 else if (MEM_P (x) && !MEM_READONLY_P (x) |
11764 && DF_INSN_LUID (insn) <= mem_last_set) | 12103 && (tick != label_tick || DF_INSN_LUID (insn) <= mem_last_set)) |
11765 { | 12104 { |
11766 if (replace) | 12105 if (replace) |
11767 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); | 12106 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
11768 return replace; | 12107 return replace; |
11769 } | 12108 } |
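The 4.5 side tightens the MEM staleness rule: a remembered memory value is trusted only if it was recorded in the extended basic block currently being scanned (TICK equal to label_tick) and no store has been seen since (its insn's LUID above mem_last_set), because LUIDs are only comparable within a block. A standalone sketch of just that predicate, with plain ints standing in for ticks and LUIDs (names illustrative):

    #include <assert.h>

    static int label_tick;     /* Bumped per scanned extended basic block.  */
    static int mem_last_set;   /* LUID of the last store seen.  */

    static int
    mem_value_still_valid (int tick, int luid)
    {
      if (tick != label_tick)      /* Earlier block: assume clobbered.  */
        return 0;
      return luid > mem_last_set;  /* No store since it was recorded.  */
    }

    int
    main (void)
    {
      label_tick = 5;
      mem_last_set = 10;
      assert (!mem_value_still_valid (4, 20));  /* Stale block.  */
      assert (!mem_value_still_valid (5, 10));  /* Store at/after record.  */
      assert (mem_value_still_valid (5, 11));   /* Same block, no store.  */
      return 0;
    }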
11869 if (rsp->last_set_label == label_tick | 12208 if (rsp->last_set_label == label_tick |
11870 && DF_INSN_LUID (rsp->last_set) >= subst_low_luid) | 12209 && DF_INSN_LUID (rsp->last_set) >= subst_low_luid) |
11871 return 0; | 12210 return 0; |
11872 | 12211 |
11873 /* If the value has all its registers valid, return it. */ | 12212 /* If the value has all its registers valid, return it. */ |
11874 if (get_last_value_validate (&value, rsp->last_set, | 12213 if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 0)) |
11875 rsp->last_set_label, 0)) | |
11876 return value; | 12214 return value; |
11877 | 12215 |
11878 /* Otherwise, make a copy and replace any invalid register with | 12216 /* Otherwise, make a copy and replace any invalid register with |
11879 (clobber (const_int 0)). If that fails for some reason, return 0. */ | 12217 (clobber (const_int 0)). If that fails for some reason, return 0. */ |
11880 | 12218 |
11881 value = copy_rtx (value); | 12219 value = copy_rtx (value); |
11882 if (get_last_value_validate (&value, rsp->last_set, | 12220 if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 1)) |
11883 rsp->last_set_label, 1)) | |
11884 return value; | 12221 return value; |
11885 | 12222 |
11886 return 0; | 12223 return 0; |
11887 } | 12224 } |
11888 | 12225 |
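The guard at the top of this hunk is a recency test: a recorded value is unusable if it was set in the block currently being scanned at or after the point where substitution started, since the producing insn may itself be getting combined away. A standalone sketch of that test with plain ints for ticks and LUIDs (names illustrative, not GCC's):

    #include <assert.h>

    static int label_tick = 7;      /* Current scan position (block).  */
    static int subst_low_luid = 42; /* First insn in this combination.  */

    static int
    value_too_recent (int set_label, int set_luid)
    {
      return set_label == label_tick && set_luid >= subst_low_luid;
    }

    int
    main (void)
    {
      assert (value_too_recent (7, 42));   /* Set by an insn being combined. */
      assert (!value_too_recent (7, 41));  /* Set earlier in the same block. */
      assert (!value_too_recent (6, 99));  /* Set in an earlier block.  */
      return 0;
    }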
11992 for (i = reg_dead_regno; i < reg_dead_endregno; i++) | 12329 for (i = reg_dead_regno; i < reg_dead_endregno; i++) |
11993 if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i)) | 12330 if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i)) |
11994 return 0; | 12331 return 0; |
11995 } | 12332 } |
11996 | 12333 |
11997 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or | 12334 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, or |
11998 beginning of function. */ | 12335 beginning of basic block. */ |
11999 for (; insn && !LABEL_P (insn) && !BARRIER_P (insn); | 12336 block = BLOCK_FOR_INSN (insn); |
12000 insn = prev_nonnote_insn (insn)) | 12337 for (;;) |
12001 { | 12338 { |
12002 note_stores (PATTERN (insn), reg_dead_at_p_1, NULL); | 12339 if (INSN_P (insn)) |
12003 if (reg_dead_flag) | 12340 { |
12004 return reg_dead_flag == 1 ? 1 : 0; | 12341 note_stores (PATTERN (insn), reg_dead_at_p_1, NULL); |
12005 | 12342 if (reg_dead_flag) |
12006 if (find_regno_note (insn, REG_DEAD, reg_dead_regno)) | 12343 return reg_dead_flag == 1 ? 1 : 0; |
12007 return 1; | 12344 |
12008 } | 12345 if (find_regno_note (insn, REG_DEAD, reg_dead_regno)) |
12009 | 12346 return 1; |
12010 /* Get the basic block that we were in. */ | 12347 } |
12011 if (insn == 0) | 12348 |
12012 block = ENTRY_BLOCK_PTR->next_bb; | 12349 if (insn == BB_HEAD (block)) |
12013 else | 12350 break; |
12014 { | 12351 |
12015 FOR_EACH_BB (block) | 12352 insn = PREV_INSN (insn); |
12016 if (insn == BB_HEAD (block)) | 12353 } |
12017 break; | 12354 |
12018 | 12355 /* Look at live-in sets for the basic block that we were in. */ |
12019 if (block == EXIT_BLOCK_PTR) | |
12020 return 0; | |
12021 } | |
12022 | |
12023 for (i = reg_dead_regno; i < reg_dead_endregno; i++) | 12356 for (i = reg_dead_regno; i < reg_dead_endregno; i++) |
12024 if (REGNO_REG_SET_P (df_get_live_in (block), i)) | 12357 if (REGNO_REG_SET_P (df_get_live_in (block), i)) |
12025 return 0; | 12358 return 0; |
12026 | 12359 |
12027 return 1; | 12360 return 1; |
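The 4.5 rewrite of reg_dead_at_p stops the backward scan at BB_HEAD of the insn's own block rather than at a label or barrier, and when nothing is found it consults the DF live-in set for that block instead of walking the CFG. A standalone sketch of that control flow, with an array of kinds standing in for the insn chain and a flag for the live-in bitmap (all names illustrative):

    #include <assert.h>

    enum insn_kind { NOTHING, SETS_REG, KILLS_REG };

    static int
    reg_dead_at (const enum insn_kind *block, int head, int pos, int live_in)
    {
      for (;;)
        {
          if (block[pos] == SETS_REG)
            return 0;               /* A set: the register is live here.  */
          if (block[pos] == KILLS_REG)
            return 1;               /* REG_DEAD note or clobber found.  */
          if (pos == head)
            break;                  /* Stop at BB_HEAD, not at a label.  */
          pos--;
        }
      return !live_in;              /* Dead on entry iff not in live-in.  */
    }

    int
    main (void)
    {
      enum insn_kind bb[] = { NOTHING, KILLS_REG, NOTHING, NOTHING };
      assert (reg_dead_at (bb, 0, 3, 1) == 1);  /* Death found in-block.  */
      assert (reg_dead_at (bb, 2, 3, 1) == 0);  /* Not found; live on entry. */
      assert (reg_dead_at (bb, 2, 3, 0) == 1);  /* Not found; dead on entry. */
      return 0;
    }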
12167 if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn) | 12500 if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn) |
12168 && ! reg_referenced_p (x, maybe_kill_insn)) | 12501 && ! reg_referenced_p (x, maybe_kill_insn)) |
12169 return; | 12502 return; |
12170 | 12503 |
12171 if (where_dead | 12504 if (where_dead |
12505 && BLOCK_FOR_INSN (where_dead) == BLOCK_FOR_INSN (to_insn) | |
12172 && DF_INSN_LUID (where_dead) >= from_luid | 12506 && DF_INSN_LUID (where_dead) >= from_luid |
12173 && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn)) | 12507 && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn)) |
12174 { | 12508 { |
12175 rtx note = remove_death (regno, where_dead); | 12509 rtx note = remove_death (regno, where_dead); |
12176 | 12510 |
12227 { | 12561 { |
12228 XEXP (note, 1) = *pnotes; | 12562 XEXP (note, 1) = *pnotes; |
12229 *pnotes = note; | 12563 *pnotes = note; |
12230 } | 12564 } |
12231 else | 12565 else |
12232 *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes); | 12566 *pnotes = alloc_reg_note (REG_DEAD, x, *pnotes); |
12233 } | 12567 } |
12234 | 12568 |
12235 return; | 12569 return; |
12236 } | 12570 } |
12237 | 12571 |
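Besides the new same-block check on where_dead, this hunk swaps the open-coded gen_rtx_EXPR_LIST for 4.5's alloc_reg_note helper, which allocates a note of the appropriate list flavor for the note kind and chains it onto the rest. At the list level the operation is an allocate-and-prepend; a sketch with illustrative types, not GCC's:

    #include <stdlib.h>

    struct note { int kind; void *datum; struct note *next; };

    /* Allocate a note carrying DATUM and prepend it to REST; the shape
       of alloc_reg_note (kind, datum, list) at the list level.  */
    static struct note *
    alloc_note (int kind, void *datum, struct note *rest)
    {
      struct note *n = (struct note *) malloc (sizeof *n);

      n->kind = kind;
      n->datum = datum;
      n->next = rest;               /* XEXP (note, 1) in rtl terms.  */
      return n;
    }

    int
    main (void)
    {
      struct note *notes = 0;
      notes = alloc_note (/* REG_DEAD */ 0, 0, notes);
      return notes == 0;            /* 0 on success: note is on the list.  */
    }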
12336 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i))) | 12670 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i))) |
12337 return 1; | 12671 return 1; |
12338 | 12672 |
12339 return 0; | 12673 return 0; |
12340 } | 12674 } |
12675 | |
12676 /* Return the next insn after INSN that is neither a NOTE nor a | |
12677 DEBUG_INSN. This routine does not look inside SEQUENCEs. */ | |
12678 | |
12679 static rtx | |
12680 next_nonnote_nondebug_insn (rtx insn) | |
12681 { | |
12682 while (insn) | |
12683 { | |
12684 insn = NEXT_INSN (insn); | |
12685 if (insn == 0) | |
12686 break; | |
12687 if (NOTE_P (insn)) | |
12688 continue; | |
12689 if (DEBUG_INSN_P (insn)) | |
12690 continue; | |
12691 break; | |
12692 } | |
12693 | |
12694 return insn; | |
12695 } | |
12696 | |
12697 | |
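This helper is new in 4.5 and exists because -g can now interleave DEBUG_INSNs with real insns; "the insn right after I2" must skip both notes and debug insns (see its use in distribute_notes below), or combine would make different decisions with and without debug info. A standalone analogue of the walk, with a linked chain and stand-ins for NOTE_P and DEBUG_INSN_P:

    #include <assert.h>
    #include <stddef.h>

    enum kind { REAL, NOTE, DEBUG };
    struct node { enum kind k; struct node *next; };

    /* Advance to the next element that is neither a note nor a debug
       entry, or to the end of the chain.  */
    static struct node *
    next_real (struct node *n)
    {
      while (n)
        {
          n = n->next;
          if (n == NULL || (n->k != NOTE && n->k != DEBUG))
            break;
        }
      return n;
    }

    int
    main (void)
    {
      struct node c = { REAL, NULL }, b = { DEBUG, &c }, a = { NOTE, &b };
      struct node start = { REAL, &a };
      assert (next_real (&start) == &c);  /* Skips the note and the debug insn. */
      return 0;
    }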
12341 | 12698 |
12342 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them | 12699 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them |
12343 as appropriate. I3 and I2 are the insns resulting from the combination | 12700 as appropriate. I3 and I2 are the insns resulting from the combination |
12344 insns including FROM (I2 may be zero). | 12701 insns including FROM (I2 may be zero). |
12345 | 12702 |
12589 && CALL_P (from_insn) | 12946 && CALL_P (from_insn) |
12590 && find_reg_fusage (from_insn, USE, XEXP (note, 0))) | 12947 && find_reg_fusage (from_insn, USE, XEXP (note, 0))) |
12591 place = from_insn; | 12948 place = from_insn; |
12592 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))) | 12949 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))) |
12593 place = i3; | 12950 place = i3; |
12594 else if (i2 != 0 && next_nonnote_insn (i2) == i3 | 12951 else if (i2 != 0 && next_nonnote_nondebug_insn (i2) == i3 |
12595 && reg_referenced_p (XEXP (note, 0), PATTERN (i2))) | 12952 && reg_referenced_p (XEXP (note, 0), PATTERN (i2))) |
12596 place = i2; | 12953 place = i2; |
12597 else if ((rtx_equal_p (XEXP (note, 0), elim_i2) | 12954 else if ((rtx_equal_p (XEXP (note, 0), elim_i2) |
12598 && !(i2mod | 12955 && !(i2mod |
12599 && reg_overlap_mentioned_p (XEXP (note, 0), | 12956 && reg_overlap_mentioned_p (XEXP (note, 0), |
12607 { | 12964 { |
12608 basic_block bb = this_basic_block; | 12965 basic_block bb = this_basic_block; |
12609 | 12966 |
12610 for (tem = PREV_INSN (tem); place == 0; tem = PREV_INSN (tem)) | 12967 for (tem = PREV_INSN (tem); place == 0; tem = PREV_INSN (tem)) |
12611 { | 12968 { |
12612 if (! INSN_P (tem)) | 12969 if (!NONDEBUG_INSN_P (tem)) |
12613 { | 12970 { |
12614 if (tem == BB_HEAD (bb)) | 12971 if (tem == BB_HEAD (bb)) |
12615 break; | 12972 break; |
12616 continue; | 12973 continue; |
12617 } | 12974 } |
12796 | 13153 |
12797 if (! dead_or_set_p (place, piece) | 13154 if (! dead_or_set_p (place, piece) |
12798 && ! reg_bitfield_target_p (piece, | 13155 && ! reg_bitfield_target_p (piece, |
12799 PATTERN (place))) | 13156 PATTERN (place))) |
12800 { | 13157 { |
12801 rtx new_note | 13158 rtx new_note = alloc_reg_note (REG_DEAD, piece, |
12802 = gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX); | 13159 NULL_RTX); |
12803 | 13160 |
12804 distribute_notes (new_note, place, place, | 13161 distribute_notes (new_note, place, place, |
12805 NULL_RTX, NULL_RTX, NULL_RTX); | 13162 NULL_RTX, NULL_RTX, NULL_RTX); |
12806 } | 13163 } |
12807 else if (! refers_to_regno_p (i, i + 1, | 13164 else if (! refers_to_regno_p (i, i + 1, |
12808 PATTERN (place), 0) | 13165 PATTERN (place), 0) |
12809 && ! find_regno_fusage (place, USE, i)) | 13166 && ! find_regno_fusage (place, USE, i)) |
12810 for (tem = PREV_INSN (place); ; | 13167 for (tem = PREV_INSN (place); ; |
12811 tem = PREV_INSN (tem)) | 13168 tem = PREV_INSN (tem)) |
12812 { | 13169 { |
12813 if (! INSN_P (tem)) | 13170 if (!NONDEBUG_INSN_P (tem)) |
12814 { | 13171 { |
12815 if (tem == BB_HEAD (bb)) | 13172 if (tem == BB_HEAD (bb)) |
12816 break; | 13173 break; |
12817 continue; | 13174 continue; |
12818 } | 13175 } |
12844 XEXP (note, 1) = REG_NOTES (place); | 13201 XEXP (note, 1) = REG_NOTES (place); |
12845 REG_NOTES (place) = note; | 13202 REG_NOTES (place) = note; |
12846 } | 13203 } |
12847 | 13204 |
12848 if (place2) | 13205 if (place2) |
12849 REG_NOTES (place2) | 13206 add_reg_note (place2, REG_NOTE_KIND (note), XEXP (note, 0)); |
12850 = gen_rtx_fmt_ee (GET_CODE (note), REG_NOTE_KIND (note), | |
12851 XEXP (note, 0), REG_NOTES (place2)); | |
12852 } | 13207 } |
12853 } | 13208 } |
12854 | 13209 |
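Same modernization as in move_deaths above: the hand-built gen_rtx_fmt_ee prepend onto REG_NOTES (place2) becomes add_reg_note, which folds the allocation and the store back into the insn into one call. A self-contained sketch of the wrapper relationship, again with illustrative non-GCC types:

    #include <stdlib.h>

    struct note { int kind; void *datum; struct note *next; };
    struct insn { struct note *reg_notes; };

    /* add_reg_note (insn, kind, datum): allocate and hook onto the insn
       in one step, replacing the open-coded REG_NOTES manipulation.  */
    static void
    add_note (struct insn *place2, int kind, void *datum)
    {
      struct note *n = (struct note *) malloc (sizeof *n);

      n->kind = kind;
      n->datum = datum;
      n->next = place2->reg_notes;  /* ..., REG_NOTES (place2) before.  */
      place2->reg_notes = n;        /* REG_NOTES (place2) = note after.  */
    }

    int
    main (void)
    {
      struct insn i = { 0 };
      add_note (&i, /* REG_NOTE_KIND (note) */ 0, /* XEXP (note, 0) */ 0);
      return i.reg_notes == 0;      /* 0 on success.  */
    }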
12855 /* Similarly to above, distribute the LOG_LINKS that used to be present on | 13210 /* Similarly to above, distribute the LOG_LINKS that used to be present on |
12856 I3, I2, and I1 to new locations. This is also called to add a link | 13211 I3, I2, and I1 to new locations. This is also called to add a link |
12900 | 13255 |
12901 for (insn = NEXT_INSN (XEXP (link, 0)); | 13256 for (insn = NEXT_INSN (XEXP (link, 0)); |
12902 (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR | 13257 (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR |
12903 || BB_HEAD (this_basic_block->next_bb) != insn)); | 13258 || BB_HEAD (this_basic_block->next_bb) != insn)); |
12904 insn = NEXT_INSN (insn)) | 13259 insn = NEXT_INSN (insn)) |
12905 if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn))) | 13260 if (DEBUG_INSN_P (insn)) |
13261 continue; | |
13262 else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn))) | |
12906 { | 13263 { |
12907 if (reg_referenced_p (reg, PATTERN (insn))) | 13264 if (reg_referenced_p (reg, PATTERN (insn))) |
12908 place = insn; | 13265 place = insn; |
12909 break; | 13266 break; |
12910 } | 13267 } |
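The new DEBUG_INSN_P continue in this forward scan serves the same -g-invariance goal as the changes above: a debug insn that happens to mention the register must neither receive the LOG_LINK nor terminate the search. A standalone sketch of that scan over an array of use kinds (names illustrative; the real code additionally distinguishes mentions from true references):

    #include <assert.h>

    enum use { NONE, DEBUG_USE, REAL_USE };

    /* Index of the first non-debug insn that uses the register,
       or -1 if the scan runs off the end of the block.  */
    static int
    first_real_use (const enum use *insns, int n)
    {
      for (int i = 0; i < n; i++)
        {
          if (insns[i] == DEBUG_USE)
            continue;              /* Debug insns never receive the link.  */
          if (insns[i] == REAL_USE)
            return i;
        }
      return -1;
    }

    int
    main (void)
    {
      enum use chain[] = { NONE, DEBUG_USE, REAL_USE };
      assert (first_real_use (chain, 3) == 2);  /* Same with or without -g. */
      return 0;
    }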
13033 rest_of_handle_combine, /* execute */ | 13390 rest_of_handle_combine, /* execute */ |
13034 NULL, /* sub */ | 13391 NULL, /* sub */ |
13035 NULL, /* next */ | 13392 NULL, /* next */ |
13036 0, /* static_pass_number */ | 13393 0, /* static_pass_number */ |
13037 TV_COMBINE, /* tv_id */ | 13394 TV_COMBINE, /* tv_id */ |
13038 0, /* properties_required */ | 13395 PROP_cfglayout, /* properties_required */ |
13039 0, /* properties_provided */ | 13396 0, /* properties_provided */ |
13040 0, /* properties_destroyed */ | 13397 0, /* properties_destroyed */ |
13041 0, /* todo_flags_start */ | 13398 0, /* todo_flags_start */ |
13042 TODO_dump_func | | 13399 TODO_dump_func | |
13043 TODO_df_finish | TODO_verify_rtl_sharing | | 13400 TODO_df_finish | TODO_verify_rtl_sharing | |