comparison gcc/simplify-rtx.c @ 67:f6334be47118

update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
author nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
date Tue, 22 Mar 2011 17:18:12 +0900
parents b7f97abdc517
children 04ced10e8804
65:65488c3d617d 67:f6334be47118
32 #include "flags.h" 32 #include "flags.h"
33 #include "insn-config.h" 33 #include "insn-config.h"
34 #include "recog.h" 34 #include "recog.h"
35 #include "function.h" 35 #include "function.h"
36 #include "expr.h" 36 #include "expr.h"
37 #include "toplev.h" 37 #include "diagnostic-core.h"
38 #include "output.h" 38 #include "output.h"
39 #include "ggc.h" 39 #include "ggc.h"
40 #include "target.h" 40 #include "target.h"
41 41
42 /* Simplification and canonicalization of RTL. */ 42 /* Simplification and canonicalization of RTL. */
206 overrider call it. */ 206 overrider call it. */
207 207
208 rtx 208 rtx
209 delegitimize_mem_from_attrs (rtx x) 209 delegitimize_mem_from_attrs (rtx x)
210 { 210 {
211 /* MEMs without MEM_OFFSETs may have been offset, so we can't just
212 use their base addresses as equivalent. */
211 if (MEM_P (x) 213 if (MEM_P (x)
212 && MEM_EXPR (x) 214 && MEM_EXPR (x)
213 && (!MEM_OFFSET (x) 215 && MEM_OFFSET (x))
214 || GET_CODE (MEM_OFFSET (x)) == CONST_INT))
215 { 216 {
216 tree decl = MEM_EXPR (x); 217 tree decl = MEM_EXPR (x);
217 enum machine_mode mode = GET_MODE (x); 218 enum machine_mode mode = GET_MODE (x);
218 HOST_WIDE_INT offset = 0; 219 HOST_WIDE_INT offset = 0;
219 220
262 && DECL_RTL_SET_P (decl) 263 && DECL_RTL_SET_P (decl)
263 && MEM_P (DECL_RTL (decl))) 264 && MEM_P (DECL_RTL (decl)))
264 { 265 {
265 rtx newx; 266 rtx newx;
266 267
267 if (MEM_OFFSET (x)) 268 offset += INTVAL (MEM_OFFSET (x));
268 offset += INTVAL (MEM_OFFSET (x));
269 269
270 newx = DECL_RTL (decl); 270 newx = DECL_RTL (decl);
271 271
272 if (MEM_P (newx)) 272 if (MEM_P (newx))
273 { 273 {
810 STORE_FLAG_VALUE permits. This is like the previous test, 810 STORE_FLAG_VALUE permits. This is like the previous test,
811 but it works even if the comparison is done in a mode larger 811 but it works even if the comparison is done in a mode larger
812 than HOST_BITS_PER_WIDE_INT. */ 812 than HOST_BITS_PER_WIDE_INT. */
813 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT 813 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
814 && COMPARISON_P (op) 814 && COMPARISON_P (op)
815 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0) 815 && (STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
816 return rtl_hooks.gen_lowpart_no_emit (mode, op); 816 return rtl_hooks.gen_lowpart_no_emit (mode, op);
817 break; 817 break;
818 818
819 case FLOAT_TRUNCATE: 819 case FLOAT_TRUNCATE:
820 if (DECIMAL_FLOAT_MODE_P (mode)) 820 if (DECIMAL_FLOAT_MODE_P (mode))
909 /* If operand is something known to be positive, ignore the ABS. */ 909 /* If operand is something known to be positive, ignore the ABS. */
910 if (GET_CODE (op) == FFS || GET_CODE (op) == ABS 910 if (GET_CODE (op) == FFS || GET_CODE (op) == ABS
911 || ((GET_MODE_BITSIZE (GET_MODE (op)) 911 || ((GET_MODE_BITSIZE (GET_MODE (op))
912 <= HOST_BITS_PER_WIDE_INT) 912 <= HOST_BITS_PER_WIDE_INT)
913 && ((nonzero_bits (op, GET_MODE (op)) 913 && ((nonzero_bits (op, GET_MODE (op))
914 & ((HOST_WIDE_INT) 1 914 & ((unsigned HOST_WIDE_INT) 1
915 << (GET_MODE_BITSIZE (GET_MODE (op)) - 1))) 915 << (GET_MODE_BITSIZE (GET_MODE (op)) - 1)))
916 == 0))) 916 == 0)))
917 return op; 917 return op;
918 918
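Many hunks in this update make the same change seen in the ABS case above: a signed HOST_WIDE_INT in a shift expression becomes an unsigned HOST_WIDE_INT. Here is a minimal standalone sketch of the sign-bit-mask idiom, using unsigned long long as a stand-in for unsigned HOST_WIDE_INT (an assumption about the host; this is editor's illustration, not GCC code). Shifting a signed 1 into the sign-bit position is undefined behaviour in C, while the unsigned form stays well defined even when the bit is the top bit of the type.

#include <stdio.h>

/* Editor's sketch, not GCC code: the mask of the sign bit of an N-bit
   value.  With a signed 1 the shift for N == 64 would be undefined;
   with an unsigned 1 it is well defined.  */
static unsigned long long
sign_bit_mask (unsigned int bits)      /* 1 <= bits <= 64 assumed */
{
  return (unsigned long long) 1 << (bits - 1);
}

int
main (void)
{
  printf ("%llx\n", sign_bit_mask (8));    /* 80 */
  printf ("%llx\n", sign_bit_mask (64));   /* 8000000000000000 */
  return 0;
}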
919 /* If operand is known to be only -1 or 0, convert ABS to NEG. */ 919 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
1006 if (GET_CODE (op) == SUBREG 1006 if (GET_CODE (op) == SUBREG
1007 && SUBREG_PROMOTED_VAR_P (op) 1007 && SUBREG_PROMOTED_VAR_P (op)
1008 && ! SUBREG_PROMOTED_UNSIGNED_P (op) 1008 && ! SUBREG_PROMOTED_UNSIGNED_P (op)
1009 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (XEXP (op, 0)))) 1009 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (XEXP (op, 0))))
1010 return rtl_hooks.gen_lowpart_no_emit (mode, op); 1010 return rtl_hooks.gen_lowpart_no_emit (mode, op);
1011
1012 /* (sign_extend:M (sign_extend:N <X>)) is (sign_extend:M <X>).
1013 (sign_extend:M (zero_extend:N <X>)) is (zero_extend:M <X>). */
1014 if (GET_CODE (op) == SIGN_EXTEND || GET_CODE (op) == ZERO_EXTEND)
1015 {
1016 gcc_assert (GET_MODE_BITSIZE (mode)
1017 > GET_MODE_BITSIZE (GET_MODE (op)));
1018 return simplify_gen_unary (GET_CODE (op), mode, XEXP (op, 0),
1019 GET_MODE (XEXP (op, 0)));
1020 }
1021
1022 /* (sign_extend:M (ashiftrt:N (ashift <X> (const_int I)) (const_int I)))
1023 is (sign_extend:M (subreg:O <X>)) if there is mode with
1024 GET_MODE_BITSIZE (N) - I bits.
1025 (sign_extend:M (lshiftrt:N (ashift <X> (const_int I)) (const_int I)))
1026 is similarly (zero_extend:M (subreg:O <X>)). */
1027 if ((GET_CODE (op) == ASHIFTRT || GET_CODE (op) == LSHIFTRT)
1028 && GET_CODE (XEXP (op, 0)) == ASHIFT
1029 && CONST_INT_P (XEXP (op, 1))
1030 && XEXP (XEXP (op, 0), 1) == XEXP (op, 1)
1031 && GET_MODE_BITSIZE (GET_MODE (op)) > INTVAL (XEXP (op, 1)))
1032 {
1033 enum machine_mode tmode
1034 = mode_for_size (GET_MODE_BITSIZE (GET_MODE (op))
1035 - INTVAL (XEXP (op, 1)), MODE_INT, 1);
1036 gcc_assert (GET_MODE_BITSIZE (mode)
1037 > GET_MODE_BITSIZE (GET_MODE (op)));
1038 if (tmode != BLKmode)
1039 {
1040 rtx inner =
1041 rtl_hooks.gen_lowpart_no_emit (tmode, XEXP (XEXP (op, 0), 0));
1042 return simplify_gen_unary (GET_CODE (op) == ASHIFTRT
1043 ? SIGN_EXTEND : ZERO_EXTEND,
1044 mode, inner, tmode);
1045 }
1046 }
1011 1047
1012 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) 1048 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1013 /* As we do not know which address space the pointer is refering to, 1049 /* As we do not know which address space the pointer is refering to,
1014 we can do this only if the target does not support different pointer 1050 we can do this only if the target does not support different pointer
1015 or address modes depending on the address space. */ 1051 or address modes depending on the address space. */
1033 && SUBREG_PROMOTED_VAR_P (op) 1069 && SUBREG_PROMOTED_VAR_P (op)
1034 && SUBREG_PROMOTED_UNSIGNED_P (op) > 0 1070 && SUBREG_PROMOTED_UNSIGNED_P (op) > 0
1035 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (XEXP (op, 0)))) 1071 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (XEXP (op, 0))))
1036 return rtl_hooks.gen_lowpart_no_emit (mode, op); 1072 return rtl_hooks.gen_lowpart_no_emit (mode, op);
1037 1073
1074 /* (zero_extend:M (zero_extend:N <X>)) is (zero_extend:M <X>). */
1075 if (GET_CODE (op) == ZERO_EXTEND)
1076 return simplify_gen_unary (ZERO_EXTEND, mode, XEXP (op, 0),
1077 GET_MODE (XEXP (op, 0)));
1078
1079 /* (zero_extend:M (lshiftrt:N (ashift <X> (const_int I)) (const_int I)))
1080 is (zero_extend:M (subreg:O <X>)) if there is mode with
1081 GET_MODE_BITSIZE (N) - I bits. */
1082 if (GET_CODE (op) == LSHIFTRT
1083 && GET_CODE (XEXP (op, 0)) == ASHIFT
1084 && CONST_INT_P (XEXP (op, 1))
1085 && XEXP (XEXP (op, 0), 1) == XEXP (op, 1)
1086 && GET_MODE_BITSIZE (GET_MODE (op)) > INTVAL (XEXP (op, 1)))
1087 {
1088 enum machine_mode tmode
1089 = mode_for_size (GET_MODE_BITSIZE (GET_MODE (op))
1090 - INTVAL (XEXP (op, 1)), MODE_INT, 1);
1091 if (tmode != BLKmode)
1092 {
1093 rtx inner =
1094 rtl_hooks.gen_lowpart_no_emit (tmode, XEXP (XEXP (op, 0), 0));
1095 return simplify_gen_unary (ZERO_EXTEND, mode, inner, tmode);
1096 }
1097 }
1098
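The two hunks above add recognition of shift pairs as extensions: (sign_extend (ashiftrt (ashift X I) I)) and (zero_extend (lshiftrt (ashift X I) I)) are extensions of the low N-I bits of X. A standalone sketch of the zero-extend case with fixed 32-bit types (editor's example, not GCC code); the sign-extend case is the same with an arithmetic right shift.

#include <stdio.h>
#include <stdint.h>

/* Editor's sketch, not GCC code: shifting left by I and logically right
   by I in 32-bit arithmetic keeps only the low 32 - I bits, i.e. it
   zero-extends a (32 - I)-bit value.  Here I = 24, so the result is the
   zero-extended low byte.  */
int
main (void)
{
  uint32_t x = 0xdeadbeef;
  uint32_t via_shifts = (uint32_t) (x << 24) >> 24;  /* lshiftrt (ashift x 24) 24 */
  uint32_t via_zext   = x & 0xff;                    /* zero-extend of the low 8 bits */
  printf ("%08x %08x\n", (unsigned) via_shifts, (unsigned) via_zext); /* both 000000ef */
  return 0;
}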
1038 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) 1099 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
1039 /* As we do not know which address space the pointer is refering to, 1100 /* As we do not know which address space the pointer is refering to,
1040 we can do this only if the target does not support different pointer 1101 we can do this only if the target does not support different pointer
1041 or address modes depending on the address space. */ 1102 or address modes depending on the address space. */
1042 if (target_default_pointer_address_modes_p () 1103 if (target_default_pointer_address_modes_p ()
1194 case ABS: 1255 case ABS:
1195 val = (arg0 >= 0 ? arg0 : - arg0); 1256 val = (arg0 >= 0 ? arg0 : - arg0);
1196 break; 1257 break;
1197 1258
1198 case FFS: 1259 case FFS:
1199 /* Don't use ffs here. Instead, get low order bit and then its
1200 number. If arg0 is zero, this will return 0, as desired. */
1201 arg0 &= GET_MODE_MASK (mode); 1260 arg0 &= GET_MODE_MASK (mode);
1202 val = exact_log2 (arg0 & (- arg0)) + 1; 1261 val = ffs_hwi (arg0);
1203 break; 1262 break;
1204 1263
1205 case CLZ: 1264 case CLZ:
1206 arg0 &= GET_MODE_MASK (mode); 1265 arg0 &= GET_MODE_MASK (mode);
1207 if (arg0 == 0 && CLZ_DEFINED_VALUE_AT_ZERO (mode, val)) 1266 if (arg0 == 0 && CLZ_DEFINED_VALUE_AT_ZERO (mode, val))
1218 up with some replacement. Seems good enough. */ 1277 up with some replacement. Seems good enough. */
1219 if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, val)) 1278 if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, val))
1220 val = GET_MODE_BITSIZE (mode); 1279 val = GET_MODE_BITSIZE (mode);
1221 } 1280 }
1222 else 1281 else
1223 val = exact_log2 (arg0 & -arg0); 1282 val = ctz_hwi (arg0);
1224 break; 1283 break;
1225 1284
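The FFS and CTZ folding above now calls the ffs_hwi and ctz_hwi helpers instead of open-coding exact_log2 (x & -x) (+ 1). Both formulations compute the same value: x & -x isolates the lowest set bit, and its position is the count of trailing zeros. A loop-based stand-in to show the equivalence (editor's example; my_ctz is a made-up helper, not a GCC function):

#include <stdio.h>

/* Editor's sketch, not GCC code: count trailing zeros of a nonzero word;
   ffs of a nonzero word is that count plus one.  */
static int
my_ctz (unsigned long long x)          /* x != 0 assumed */
{
  int n = 0;
  while ((x & 1) == 0)
    {
      x >>= 1;
      n++;
    }
  return n;
}

int
main (void)
{
  unsigned long long x = 0x50;                  /* lowest set bit is bit 4 */
  unsigned long long lowbit = x & -x;           /* 0x10 */
  printf ("ctz = %d, ffs = %d, x & -x = %llx\n",
          my_ctz (x), my_ctz (x) + 1, lowbit);  /* ctz = 4, ffs = 5, 10 */
  return 0;
}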
1226 case POPCOUNT: 1285 case POPCOUNT:
1227 arg0 &= GET_MODE_MASK (mode); 1286 arg0 &= GET_MODE_MASK (mode);
1228 val = 0; 1287 val = 0;
1268 and sign-extension. */ 1327 and sign-extension. */
1269 gcc_assert (width == GET_MODE_BITSIZE (op_mode)); 1328 gcc_assert (width == GET_MODE_BITSIZE (op_mode));
1270 val = arg0; 1329 val = arg0;
1271 } 1330 }
1272 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT) 1331 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
1273 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode)); 1332 val = arg0 & ~((unsigned HOST_WIDE_INT) (-1)
1333 << GET_MODE_BITSIZE (op_mode));
1274 else 1334 else
1275 return 0; 1335 return 0;
1276 break; 1336 break;
1277 1337
1278 case SIGN_EXTEND: 1338 case SIGN_EXTEND:
1287 val = arg0; 1347 val = arg0;
1288 } 1348 }
1289 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT) 1349 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
1290 { 1350 {
1291 val 1351 val
1292 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode)); 1352 = arg0 & ~((unsigned HOST_WIDE_INT) (-1)
1293 if (val 1353 << GET_MODE_BITSIZE (op_mode));
1294 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1))) 1354 if (val & ((unsigned HOST_WIDE_INT) 1
1295 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode); 1355 << (GET_MODE_BITSIZE (op_mode) - 1)))
1356 val
1357 -= (unsigned HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
1296 } 1358 }
1297 else 1359 else
1298 return 0; 1360 return 0;
1299 break; 1361 break;
1300 1362
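The ZERO_EXTEND and SIGN_EXTEND constant folding above first masks the value down to the operand mode's width and, for SIGN_EXTEND, subtracts 2^width when the new sign bit is set. A worked standalone version for an 8-bit operand (editor's example, not GCC code; long long and unsigned long long stand in for the HOST_WIDE_INT types):

#include <stdio.h>

int
main (void)
{
  long long arg0 = 0x1f0;                      /* the raw constant */
  unsigned int bits = 8;                       /* width of the operand mode */
  long long val = arg0 & ~((unsigned long long) -1 << bits);  /* zero-extend: 0xf0 */
  if (val & ((unsigned long long) 1 << (bits - 1)))           /* new sign bit set?  */
    val = val - (1LL << bits);                 /* 0xf0 - 0x100 = -16 */
  printf ("%lld\n", val);                      /* -16 */
  return 0;
}

GCC's version forms 2^width with an unsigned constant so that the shift stays defined even when the operand mode is as wide as HOST_WIDE_INT; the small width here lets the sketch use plain signed arithmetic.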
1348 lv = l1, hv = h1; 1410 lv = l1, hv = h1;
1349 break; 1411 break;
1350 1412
1351 case FFS: 1413 case FFS:
1352 hv = 0; 1414 hv = 0;
1353 if (l1 == 0) 1415 if (l1 != 0)
1354 { 1416 lv = ffs_hwi (l1);
1355 if (h1 == 0) 1417 else if (h1 != 0)
1356 lv = 0; 1418 lv = HOST_BITS_PER_WIDE_INT + ffs_hwi (h1);
1357 else
1358 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & -h1) + 1;
1359 }
1360 else 1419 else
1361 lv = exact_log2 (l1 & -l1) + 1; 1420 lv = 0;
1362 break; 1421 break;
1363 1422
1364 case CLZ: 1423 case CLZ:
1365 hv = 0; 1424 hv = 0;
1366 if (h1 != 0) 1425 if (h1 != 0)
1373 break; 1432 break;
1374 1433
1375 case CTZ: 1434 case CTZ:
1376 hv = 0; 1435 hv = 0;
1377 if (l1 != 0) 1436 if (l1 != 0)
1378 lv = exact_log2 (l1 & -l1); 1437 lv = ctz_hwi (l1);
1379 else if (h1 != 0) 1438 else if (h1 != 0)
1380 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & -h1); 1439 lv = HOST_BITS_PER_WIDE_INT + ctz_hwi (h1);
1381 else if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, lv)) 1440 else if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, lv))
1382 lv = GET_MODE_BITSIZE (mode); 1441 lv = GET_MODE_BITSIZE (mode);
1383 break; 1442 break;
1384 1443
1385 case POPCOUNT: 1444 case POPCOUNT:
1446 return 0; 1505 return 0;
1447 else 1506 else
1448 { 1507 {
1449 lv = l1 & GET_MODE_MASK (op_mode); 1508 lv = l1 & GET_MODE_MASK (op_mode);
1450 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT 1509 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
1451 && (lv & ((HOST_WIDE_INT) 1 1510 && (lv & ((unsigned HOST_WIDE_INT) 1
1452 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0) 1511 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
1453 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode); 1512 lv -= (unsigned HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
1454 1513
1455 hv = HWI_SIGN_EXTEND (lv); 1514 hv = HWI_SIGN_EXTEND (lv);
1456 } 1515 }
1457 break; 1516 break;
1458 1517
1554 } 1613 }
1555 1614
1556 /* Test against the signed lower bound. */ 1615 /* Test against the signed lower bound. */
1557 if (width > HOST_BITS_PER_WIDE_INT) 1616 if (width > HOST_BITS_PER_WIDE_INT)
1558 { 1617 {
1559 th = (HOST_WIDE_INT) -1 << (width - HOST_BITS_PER_WIDE_INT - 1); 1618 th = (unsigned HOST_WIDE_INT) (-1)
1619 << (width - HOST_BITS_PER_WIDE_INT - 1);
1560 tl = 0; 1620 tl = 0;
1561 } 1621 }
1562 else 1622 else
1563 { 1623 {
1564 th = -1; 1624 th = -1;
1565 tl = (HOST_WIDE_INT) -1 << (width - 1); 1625 tl = (unsigned HOST_WIDE_INT) (-1) << (width - 1);
1566 } 1626 }
1567 real_from_integer (&t, VOIDmode, tl, th, 0); 1627 real_from_integer (&t, VOIDmode, tl, th, 0);
1568 if (REAL_VALUES_LESS (x, t)) 1628 if (REAL_VALUES_LESS (x, t))
1569 { 1629 {
1570 xh = th; 1630 xh = th;
2106 2166
2107 case MULT: 2167 case MULT:
2108 if (trueop1 == constm1_rtx) 2168 if (trueop1 == constm1_rtx)
2109 return simplify_gen_unary (NEG, mode, op0, mode); 2169 return simplify_gen_unary (NEG, mode, op0, mode);
2110 2170
2171 if (GET_CODE (op0) == NEG)
2172 {
2173 rtx temp = simplify_unary_operation (NEG, mode, op1, mode);
2174 if (temp)
2175 return simplify_gen_binary (MULT, mode, XEXP (op0, 0), temp);
2176 }
2177 if (GET_CODE (op1) == NEG)
2178 {
2179 rtx temp = simplify_unary_operation (NEG, mode, op0, mode);
2180 if (temp)
2181 return simplify_gen_binary (MULT, mode, temp, XEXP (op1, 0));
2182 }
2183
2111 /* Maybe simplify x * 0 to 0. The reduction is not valid if 2184 /* Maybe simplify x * 0 to 0. The reduction is not valid if
2112 x is NaN, since x * 0 is then also NaN. Nor is it valid 2185 x is NaN, since x * 0 is then also NaN. Nor is it valid
2113 when the mode has signed zeros, since multiplying a negative 2186 when the mode has signed zeros, since multiplying a negative
2114 number by 0 will give -0, not 0. */ 2187 number by 0 will give -0, not 0. */
2115 if (!HONOR_NANS (mode) 2188 if (!HONOR_NANS (mode)
2125 return op0; 2198 return op0;
2126 2199
2127 /* Convert multiply by constant power of two into shift unless 2200 /* Convert multiply by constant power of two into shift unless
2128 we are still generating RTL. This test is a kludge. */ 2201 we are still generating RTL. This test is a kludge. */
2129 if (CONST_INT_P (trueop1) 2202 if (CONST_INT_P (trueop1)
2130 && (val = exact_log2 (INTVAL (trueop1))) >= 0 2203 && (val = exact_log2 (UINTVAL (trueop1))) >= 0
2131 /* If the mode is larger than the host word size, and the 2204 /* If the mode is larger than the host word size, and the
2132 uppermost bit is set, then this isn't a power of two due 2205 uppermost bit is set, then this isn't a power of two due
2133 to implicit sign extension. */ 2206 to implicit sign extension. */
2134 && (width <= HOST_BITS_PER_WIDE_INT 2207 && (width <= HOST_BITS_PER_WIDE_INT
2135 || val != HOST_BITS_PER_WIDE_INT - 1)) 2208 || val != HOST_BITS_PER_WIDE_INT - 1))
2188 return tem; 2261 return tem;
2189 } 2262 }
2190 break; 2263 break;
2191 2264
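The MULT case above converts multiplication by a constant power of two into a left shift, and the change from INTVAL to UINTVAL makes the exact_log2 test look at the unsigned bit pattern. The identity it relies on, shown with stand-in C types (editor's example, not GCC code):

#include <stdio.h>

/* Editor's sketch, not GCC code: x * 2^k == x << k; the loop is a crude
   stand-in for exact_log2 and assumes c really is a power of two.  */
int
main (void)
{
  unsigned long long x = 12345, c = 8;
  int k = 0;
  while ((1ULL << k) != c)
    k++;
  printf ("%llu %llu\n", x * c, x << k);   /* both 98760 */
  return 0;
}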
2192 case IOR: 2265 case IOR:
2193 if (trueop1 == const0_rtx) 2266 if (trueop1 == CONST0_RTX (mode))
2194 return op0; 2267 return op0;
2195 if (CONST_INT_P (trueop1) 2268 if (CONST_INT_P (trueop1)
2196 && ((INTVAL (trueop1) & GET_MODE_MASK (mode)) 2269 && ((UINTVAL (trueop1) & GET_MODE_MASK (mode))
2197 == GET_MODE_MASK (mode))) 2270 == GET_MODE_MASK (mode)))
2198 return op1; 2271 return op1;
2199 if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)) 2272 if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2200 return op0; 2273 return op0;
2201 /* A | (~A) -> -1 */ 2274 /* A | (~A) -> -1 */
2206 return constm1_rtx; 2279 return constm1_rtx;
2207 2280
2208 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */ 2281 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
2209 if (CONST_INT_P (op1) 2282 if (CONST_INT_P (op1)
2210 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT 2283 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2211 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0) 2284 && (nonzero_bits (op0, mode) & ~UINTVAL (op1)) == 0)
2212 return op1; 2285 return op1;
2213 2286
2214 /* Canonicalize (X & C1) | C2. */ 2287 /* Canonicalize (X & C1) | C2. */
2215 if (GET_CODE (op0) == AND 2288 if (GET_CODE (op0) == AND
2216 && CONST_INT_P (trueop1) 2289 && CONST_INT_P (trueop1)
2295 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT 2368 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2296 || INTVAL (op1) > 0) 2369 || INTVAL (op1) > 0)
2297 && GET_CODE (op0) == AND 2370 && GET_CODE (op0) == AND
2298 && CONST_INT_P (XEXP (op0, 1)) 2371 && CONST_INT_P (XEXP (op0, 1))
2299 && CONST_INT_P (op1) 2372 && CONST_INT_P (op1)
2300 && (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0) 2373 && (UINTVAL (XEXP (op0, 1)) & UINTVAL (op1)) != 0)
2301 return simplify_gen_binary (IOR, mode, 2374 return simplify_gen_binary (IOR, mode,
2302 simplify_gen_binary 2375 simplify_gen_binary
2303 (AND, mode, XEXP (op0, 0), 2376 (AND, mode, XEXP (op0, 0),
2304 GEN_INT (INTVAL (XEXP (op0, 1)) 2377 GEN_INT (UINTVAL (XEXP (op0, 1))
2305 & ~INTVAL (op1))), 2378 & ~UINTVAL (op1))),
2306 op1); 2379 op1);
2307 2380
2308 /* If OP0 is (ashiftrt (plus ...) C), it might actually be 2381 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
2309 a (sign_extend (plus ...)). Then check if OP1 is a CONST_INT and 2382 a (sign_extend (plus ...)). Then check if OP1 is a CONST_INT and
2310 the PLUS does not affect any of the bits in OP1: then we can do 2383 the PLUS does not affect any of the bits in OP1: then we can do
2330 if (tem) 2403 if (tem)
2331 return tem; 2404 return tem;
2332 break; 2405 break;
2333 2406
2334 case XOR: 2407 case XOR:
2335 if (trueop1 == const0_rtx) 2408 if (trueop1 == CONST0_RTX (mode))
2336 return op0; 2409 return op0;
2337 if (CONST_INT_P (trueop1) 2410 if (CONST_INT_P (trueop1)
2338 && ((INTVAL (trueop1) & GET_MODE_MASK (mode)) 2411 && ((UINTVAL (trueop1) & GET_MODE_MASK (mode))
2339 == GET_MODE_MASK (mode))) 2412 == GET_MODE_MASK (mode)))
2340 return simplify_gen_unary (NOT, mode, op0, mode); 2413 return simplify_gen_unary (NOT, mode, op0, mode);
2341 if (rtx_equal_p (trueop0, trueop1) 2414 if (rtx_equal_p (trueop0, trueop1)
2342 && ! side_effects_p (op0) 2415 && ! side_effects_p (op0)
2343 && GET_MODE_CLASS (mode) != MODE_CC) 2416 && GET_MODE_CLASS (mode) != MODE_CC)
2477 if ((GET_CODE (op0) == SIGN_EXTEND 2550 if ((GET_CODE (op0) == SIGN_EXTEND
2478 || GET_CODE (op0) == ZERO_EXTEND) 2551 || GET_CODE (op0) == ZERO_EXTEND)
2479 && CONST_INT_P (trueop1) 2552 && CONST_INT_P (trueop1)
2480 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT 2553 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2481 && (~GET_MODE_MASK (GET_MODE (XEXP (op0, 0))) 2554 && (~GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))
2482 & INTVAL (trueop1)) == 0) 2555 & UINTVAL (trueop1)) == 0)
2483 { 2556 {
2484 enum machine_mode imode = GET_MODE (XEXP (op0, 0)); 2557 enum machine_mode imode = GET_MODE (XEXP (op0, 0));
2485 tem = simplify_gen_binary (AND, imode, XEXP (op0, 0), 2558 tem = simplify_gen_binary (AND, imode, XEXP (op0, 0),
2486 gen_int_mode (INTVAL (trueop1), 2559 gen_int_mode (INTVAL (trueop1),
2487 imode)); 2560 imode));
2558 and for - instead of + and/or ^ instead of |. 2631 and for - instead of + and/or ^ instead of |.
2559 Also, if (N & M) == 0, then 2632 Also, if (N & M) == 0, then
2560 (A +- N) & M -> A & M. */ 2633 (A +- N) & M -> A & M. */
2561 if (CONST_INT_P (trueop1) 2634 if (CONST_INT_P (trueop1)
2562 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT 2635 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2563 && ~INTVAL (trueop1) 2636 && ~UINTVAL (trueop1)
2564 && (INTVAL (trueop1) & (INTVAL (trueop1) + 1)) == 0 2637 && (UINTVAL (trueop1) & (UINTVAL (trueop1) + 1)) == 0
2565 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS)) 2638 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS))
2566 { 2639 {
2567 rtx pmop[2]; 2640 rtx pmop[2];
2568 int which; 2641 int which;
2569 2642
2570 pmop[0] = XEXP (op0, 0); 2643 pmop[0] = XEXP (op0, 0);
2571 pmop[1] = XEXP (op0, 1); 2644 pmop[1] = XEXP (op0, 1);
2572 2645
2573 if (CONST_INT_P (pmop[1]) 2646 if (CONST_INT_P (pmop[1])
2574 && (INTVAL (pmop[1]) & INTVAL (trueop1)) == 0) 2647 && (UINTVAL (pmop[1]) & UINTVAL (trueop1)) == 0)
2575 return simplify_gen_binary (AND, mode, pmop[0], op1); 2648 return simplify_gen_binary (AND, mode, pmop[0], op1);
2576 2649
2577 for (which = 0; which < 2; which++) 2650 for (which = 0; which < 2; which++)
2578 { 2651 {
2579 tem = pmop[which]; 2652 tem = pmop[which];
2580 switch (GET_CODE (tem)) 2653 switch (GET_CODE (tem))
2581 { 2654 {
2582 case AND: 2655 case AND:
2583 if (CONST_INT_P (XEXP (tem, 1)) 2656 if (CONST_INT_P (XEXP (tem, 1))
2584 && (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1)) 2657 && (UINTVAL (XEXP (tem, 1)) & UINTVAL (trueop1))
2585 == INTVAL (trueop1)) 2658 == UINTVAL (trueop1))
2586 pmop[which] = XEXP (tem, 0); 2659 pmop[which] = XEXP (tem, 0);
2587 break; 2660 break;
2588 case IOR: 2661 case IOR:
2589 case XOR: 2662 case XOR:
2590 if (CONST_INT_P (XEXP (tem, 1)) 2663 if (CONST_INT_P (XEXP (tem, 1))
2591 && (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1)) == 0) 2664 && (UINTVAL (XEXP (tem, 1)) & UINTVAL (trueop1)) == 0)
2592 pmop[which] = XEXP (tem, 0); 2665 pmop[which] = XEXP (tem, 0);
2593 break; 2666 break;
2594 default: 2667 default:
2595 break; 2668 break;
2596 } 2669 }
2632 /* x/1 is x. */ 2705 /* x/1 is x. */
2633 if (trueop1 == CONST1_RTX (mode)) 2706 if (trueop1 == CONST1_RTX (mode))
2634 return rtl_hooks.gen_lowpart_no_emit (mode, op0); 2707 return rtl_hooks.gen_lowpart_no_emit (mode, op0);
2635 /* Convert divide by power of two into shift. */ 2708 /* Convert divide by power of two into shift. */
2636 if (CONST_INT_P (trueop1) 2709 if (CONST_INT_P (trueop1)
2637 && (val = exact_log2 (INTVAL (trueop1))) > 0) 2710 && (val = exact_log2 (UINTVAL (trueop1))) > 0)
2638 return simplify_gen_binary (LSHIFTRT, mode, op0, GEN_INT (val)); 2711 return simplify_gen_binary (LSHIFTRT, mode, op0, GEN_INT (val));
2639 break; 2712 break;
2640 2713
2641 case DIV: 2714 case DIV:
2642 /* Handle floating point and integers separately. */ 2715 /* Handle floating point and integers separately. */
2679 } 2752 }
2680 } 2753 }
2681 else 2754 else
2682 { 2755 {
2683 /* 0/x is 0 (or x&0 if x has side-effects). */ 2756 /* 0/x is 0 (or x&0 if x has side-effects). */
2684 if (trueop0 == CONST0_RTX (mode)) 2757 if (trueop0 == CONST0_RTX (mode)
2758 && !cfun->can_throw_non_call_exceptions)
2685 { 2759 {
2686 if (side_effects_p (op1)) 2760 if (side_effects_p (op1))
2687 return simplify_gen_binary (AND, mode, op1, trueop0); 2761 return simplify_gen_binary (AND, mode, op1, trueop0);
2688 return trueop0; 2762 return trueop0;
2689 } 2763 }
2714 return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode)); 2788 return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode));
2715 return CONST0_RTX (mode); 2789 return CONST0_RTX (mode);
2716 } 2790 }
2717 /* Implement modulus by power of two as AND. */ 2791 /* Implement modulus by power of two as AND. */
2718 if (CONST_INT_P (trueop1) 2792 if (CONST_INT_P (trueop1)
2719 && exact_log2 (INTVAL (trueop1)) > 0) 2793 && exact_log2 (UINTVAL (trueop1)) > 0)
2720 return simplify_gen_binary (AND, mode, op0, 2794 return simplify_gen_binary (AND, mode, op0,
2721 GEN_INT (INTVAL (op1) - 1)); 2795 GEN_INT (INTVAL (op1) - 1));
2722 break; 2796 break;
2723 2797
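The UMOD case above implements unsigned modulus by a power of two as a mask, and the earlier UDIV case implements unsigned division by a power of two as a logical right shift. A quick numeric check of both identities (editor's example, not GCC code):

#include <stdio.h>

int
main (void)
{
  unsigned long long x = 1000003;
  printf ("%llu %llu\n", x % 16, x & 15);   /* both 3 */
  printf ("%llu %llu\n", x / 16, x >> 4);   /* both 62500 */
  return 0;
}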
2724 case MOD: 2798 case MOD:
2745 return op0; 2819 return op0;
2746 if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1)) 2820 if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
2747 return op0; 2821 return op0;
2748 /* Rotating ~0 always results in ~0. */ 2822 /* Rotating ~0 always results in ~0. */
2749 if (CONST_INT_P (trueop0) && width <= HOST_BITS_PER_WIDE_INT 2823 if (CONST_INT_P (trueop0) && width <= HOST_BITS_PER_WIDE_INT
2750 && (unsigned HOST_WIDE_INT) INTVAL (trueop0) == GET_MODE_MASK (mode) 2824 && UINTVAL (trueop0) == GET_MODE_MASK (mode)
2751 && ! side_effects_p (op1)) 2825 && ! side_effects_p (op1))
2752 return op0; 2826 return op0;
2753 canonicalize_shift: 2827 canonicalize_shift:
2754 if (SHIFT_COUNT_TRUNCATED && CONST_INT_P (op1)) 2828 if (SHIFT_COUNT_TRUNCATED && CONST_INT_P (op1))
2755 { 2829 {
2791 goto canonicalize_shift; 2865 goto canonicalize_shift;
2792 2866
2793 case SMIN: 2867 case SMIN:
2794 if (width <= HOST_BITS_PER_WIDE_INT 2868 if (width <= HOST_BITS_PER_WIDE_INT
2795 && CONST_INT_P (trueop1) 2869 && CONST_INT_P (trueop1)
2796 && INTVAL (trueop1) == (HOST_WIDE_INT) 1 << (width -1) 2870 && UINTVAL (trueop1) == (unsigned HOST_WIDE_INT) 1 << (width -1)
2797 && ! side_effects_p (op0)) 2871 && ! side_effects_p (op0))
2798 return op1; 2872 return op1;
2799 if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)) 2873 if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2800 return op0; 2874 return op0;
2801 tem = simplify_associative_operation (code, mode, op0, op1); 2875 tem = simplify_associative_operation (code, mode, op0, op1);
2804 break; 2878 break;
2805 2879
2806 case SMAX: 2880 case SMAX:
2807 if (width <= HOST_BITS_PER_WIDE_INT 2881 if (width <= HOST_BITS_PER_WIDE_INT
2808 && CONST_INT_P (trueop1) 2882 && CONST_INT_P (trueop1)
2809 && ((unsigned HOST_WIDE_INT) INTVAL (trueop1) 2883 && (UINTVAL (trueop1) == GET_MODE_MASK (mode) >> 1)
2810 == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
2811 && ! side_effects_p (op0)) 2884 && ! side_effects_p (op0))
2812 return op1; 2885 return op1;
2813 if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)) 2886 if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
2814 return op0; 2887 return op0;
2815 tem = simplify_associative_operation (code, mode, op0, op1); 2888 tem = simplify_associative_operation (code, mode, op0, op1);
3266 } 3339 }
3267 } 3340 }
3268 3341
3269 /* We can fold some multi-word operations. */ 3342 /* We can fold some multi-word operations. */
3270 if (GET_MODE_CLASS (mode) == MODE_INT 3343 if (GET_MODE_CLASS (mode) == MODE_INT
3271 && width == HOST_BITS_PER_WIDE_INT * 2 3344 && width == HOST_BITS_PER_DOUBLE_INT
3272 && (GET_CODE (op0) == CONST_DOUBLE || CONST_INT_P (op0)) 3345 && (CONST_DOUBLE_P (op0) || CONST_INT_P (op0))
3273 && (GET_CODE (op1) == CONST_DOUBLE || CONST_INT_P (op1))) 3346 && (CONST_DOUBLE_P (op1) || CONST_INT_P (op1)))
3274 { 3347 {
3275 unsigned HOST_WIDE_INT l1, l2, lv, lt; 3348 double_int o0, o1, res, tmp;
3276 HOST_WIDE_INT h1, h2, hv, ht; 3349
3277 3350 o0 = rtx_to_double_int (op0);
3278 if (GET_CODE (op0) == CONST_DOUBLE) 3351 o1 = rtx_to_double_int (op1);
3279 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3280 else
3281 l1 = INTVAL (op0), h1 = HWI_SIGN_EXTEND (l1);
3282
3283 if (GET_CODE (op1) == CONST_DOUBLE)
3284 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3285 else
3286 l2 = INTVAL (op1), h2 = HWI_SIGN_EXTEND (l2);
3287 3352
3288 switch (code) 3353 switch (code)
3289 { 3354 {
3290 case MINUS: 3355 case MINUS:
3291 /* A - B == A + (-B). */ 3356 /* A - B == A + (-B). */
3292 neg_double (l2, h2, &lv, &hv); 3357 o1 = double_int_neg (o1);
3293 l2 = lv, h2 = hv;
3294 3358
3295 /* Fall through.... */ 3359 /* Fall through.... */
3296 3360
3297 case PLUS: 3361 case PLUS:
3298 add_double (l1, h1, l2, h2, &lv, &hv); 3362 res = double_int_add (o0, o1);
3299 break; 3363 break;
3300 3364
3301 case MULT: 3365 case MULT:
3302 mul_double (l1, h1, l2, h2, &lv, &hv); 3366 res = double_int_mul (o0, o1);
3303 break; 3367 break;
3304 3368
3305 case DIV: 3369 case DIV:
3306 if (div_and_round_double (TRUNC_DIV_EXPR, 0, l1, h1, l2, h2, 3370 if (div_and_round_double (TRUNC_DIV_EXPR, 0,
3307 &lv, &hv, &lt, &ht)) 3371 o0.low, o0.high, o1.low, o1.high,
3372 &res.low, &res.high,
3373 &tmp.low, &tmp.high))
3308 return 0; 3374 return 0;
3309 break; 3375 break;
3310 3376
3311 case MOD: 3377 case MOD:
3312 if (div_and_round_double (TRUNC_DIV_EXPR, 0, l1, h1, l2, h2, 3378 if (div_and_round_double (TRUNC_DIV_EXPR, 0,
3313 &lt, &ht, &lv, &hv)) 3379 o0.low, o0.high, o1.low, o1.high,
3380 &tmp.low, &tmp.high,
3381 &res.low, &res.high))
3314 return 0; 3382 return 0;
3315 break; 3383 break;
3316 3384
3317 case UDIV: 3385 case UDIV:
3318 if (div_and_round_double (TRUNC_DIV_EXPR, 1, l1, h1, l2, h2, 3386 if (div_and_round_double (TRUNC_DIV_EXPR, 1,
3319 &lv, &hv, &lt, &ht)) 3387 o0.low, o0.high, o1.low, o1.high,
3388 &res.low, &res.high,
3389 &tmp.low, &tmp.high))
3320 return 0; 3390 return 0;
3321 break; 3391 break;
3322 3392
3323 case UMOD: 3393 case UMOD:
3324 if (div_and_round_double (TRUNC_DIV_EXPR, 1, l1, h1, l2, h2, 3394 if (div_and_round_double (TRUNC_DIV_EXPR, 1,
3325 &lt, &ht, &lv, &hv)) 3395 o0.low, o0.high, o1.low, o1.high,
3396 &tmp.low, &tmp.high,
3397 &res.low, &res.high))
3326 return 0; 3398 return 0;
3327 break; 3399 break;
3328 3400
3329 case AND: 3401 case AND:
3330 lv = l1 & l2, hv = h1 & h2; 3402 res = double_int_and (o0, o1);
3331 break; 3403 break;
3332 3404
3333 case IOR: 3405 case IOR:
3334 lv = l1 | l2, hv = h1 | h2; 3406 res = double_int_ior (o0, o1);
3335 break; 3407 break;
3336 3408
3337 case XOR: 3409 case XOR:
3338 lv = l1 ^ l2, hv = h1 ^ h2; 3410 res = double_int_xor (o0, o1);
3339 break; 3411 break;
3340 3412
3341 case SMIN: 3413 case SMIN:
3342 if (h1 < h2 3414 res = double_int_smin (o0, o1);
3343 || (h1 == h2
3344 && ((unsigned HOST_WIDE_INT) l1
3345 < (unsigned HOST_WIDE_INT) l2)))
3346 lv = l1, hv = h1;
3347 else
3348 lv = l2, hv = h2;
3349 break; 3415 break;
3350 3416
3351 case SMAX: 3417 case SMAX:
3352 if (h1 > h2 3418 res = double_int_smax (o0, o1);
3353 || (h1 == h2
3354 && ((unsigned HOST_WIDE_INT) l1
3355 > (unsigned HOST_WIDE_INT) l2)))
3356 lv = l1, hv = h1;
3357 else
3358 lv = l2, hv = h2;
3359 break; 3419 break;
3360 3420
3361 case UMIN: 3421 case UMIN:
3362 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2 3422 res = double_int_umin (o0, o1);
3363 || (h1 == h2
3364 && ((unsigned HOST_WIDE_INT) l1
3365 < (unsigned HOST_WIDE_INT) l2)))
3366 lv = l1, hv = h1;
3367 else
3368 lv = l2, hv = h2;
3369 break; 3423 break;
3370 3424
3371 case UMAX: 3425 case UMAX:
3372 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2 3426 res = double_int_umax (o0, o1);
3373 || (h1 == h2
3374 && ((unsigned HOST_WIDE_INT) l1
3375 > (unsigned HOST_WIDE_INT) l2)))
3376 lv = l1, hv = h1;
3377 else
3378 lv = l2, hv = h2;
3379 break; 3427 break;
3380 3428
3381 case LSHIFTRT: case ASHIFTRT: 3429 case LSHIFTRT: case ASHIFTRT:
3382 case ASHIFT: 3430 case ASHIFT:
3383 case ROTATE: case ROTATERT: 3431 case ROTATE: case ROTATERT:
3384 if (SHIFT_COUNT_TRUNCATED) 3432 {
3385 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0; 3433 unsigned HOST_WIDE_INT cnt;
3386 3434
3387 if (h2 != 0 || l2 >= GET_MODE_BITSIZE (mode)) 3435 if (SHIFT_COUNT_TRUNCATED)
3388 return 0; 3436 o1 = double_int_zext (o1, GET_MODE_BITSIZE (mode));
3389 3437
3390 if (code == LSHIFTRT || code == ASHIFTRT) 3438 if (!double_int_fits_in_uhwi_p (o1)
3391 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 3439 || double_int_to_uhwi (o1) >= GET_MODE_BITSIZE (mode))
3392 code == ASHIFTRT); 3440 return 0;
3393 else if (code == ASHIFT) 3441
3394 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1); 3442 cnt = double_int_to_uhwi (o1);
3395 else if (code == ROTATE) 3443
3396 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv); 3444 if (code == LSHIFTRT || code == ASHIFTRT)
3397 else /* code == ROTATERT */ 3445 res = double_int_rshift (o0, cnt, GET_MODE_BITSIZE (mode),
3398 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv); 3446 code == ASHIFTRT);
3447 else if (code == ASHIFT)
3448 res = double_int_lshift (o0, cnt, GET_MODE_BITSIZE (mode),
3449 true);
3450 else if (code == ROTATE)
3451 res = double_int_lrotate (o0, cnt, GET_MODE_BITSIZE (mode));
3452 else /* code == ROTATERT */
3453 res = double_int_rrotate (o0, cnt, GET_MODE_BITSIZE (mode));
3454 }
3399 break; 3455 break;
3400 3456
3401 default: 3457 default:
3402 return 0; 3458 return 0;
3403 } 3459 }
3404 3460
3405 return immed_double_const (lv, hv, mode); 3461 return immed_double_int_const (res, mode);
3406 } 3462 }
3407 3463
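The whole multi-word constant-folding block above is rewritten to use the double_int type and its helpers (double_int_add, double_int_mul, double_int_rshift, and so on) instead of carrying explicit low/high word pairs and calling add_double, mul_double and friends. As a rough picture of what such a two-word representation does, here is a standalone sketch of double-width addition with carry (editor's example; struct dwide and dwide_add are made-up names, not GCC's double_int implementation):

#include <stdio.h>

/* Editor's sketch, not GCC's double_int: a value twice as wide as the
   host word, stored as an unsigned low part and a signed high part,
   with addition done word by word and a carry out of the low half.  */
struct dwide
{
  unsigned long long low;   /* stand-in for the low HOST_WIDE_INT */
  long long high;           /* stand-in for the high HOST_WIDE_INT */
};

static struct dwide
dwide_add (struct dwide a, struct dwide b)
{
  struct dwide r;
  r.low = a.low + b.low;                         /* wraps modulo 2^64 */
  r.high = a.high + b.high + (r.low < a.low);    /* carry out of the low word */
  return r;
}

int
main (void)
{
  struct dwide a = { 0xffffffffffffffffULL, 0 };   /* 2^64 - 1 */
  struct dwide b = { 2, 0 };
  struct dwide s = dwide_add (a, b);
  printf ("high %lld low %llu\n", s.high, s.low);  /* high 1 low 1 */
  return 0;
}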
3408 if (CONST_INT_P (op0) && CONST_INT_P (op1) 3464 if (CONST_INT_P (op0) && CONST_INT_P (op1)
3409 && width <= HOST_BITS_PER_WIDE_INT && width != 0) 3465 && width <= HOST_BITS_PER_WIDE_INT && width != 0)
3410 { 3466 {
3414 arg0 = INTVAL (op0); 3470 arg0 = INTVAL (op0);
3415 arg1 = INTVAL (op1); 3471 arg1 = INTVAL (op1);
3416 3472
3417 if (width < HOST_BITS_PER_WIDE_INT) 3473 if (width < HOST_BITS_PER_WIDE_INT)
3418 { 3474 {
3419 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1; 3475 arg0 &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
3420 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1; 3476 arg1 &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
3421 3477
3422 arg0s = arg0; 3478 arg0s = arg0;
3423 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1))) 3479 if (arg0s & ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
3424 arg0s |= ((HOST_WIDE_INT) (-1) << width); 3480 arg0s |= ((unsigned HOST_WIDE_INT) (-1) << width);
3425 3481
3426 arg1s = arg1; 3482 arg1s = arg1;
3427 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1))) 3483 if (arg1s & ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
3428 arg1s |= ((HOST_WIDE_INT) (-1) << width); 3484 arg1s |= ((unsigned HOST_WIDE_INT) (-1) << width);
3429 } 3485 }
3430 else 3486 else
3431 { 3487 {
3432 arg0s = arg0; 3488 arg0s = arg0;
3433 arg1s = arg1; 3489 arg1s = arg1;
3449 val = arg0s * arg1s; 3505 val = arg0s * arg1s;
3450 break; 3506 break;
3451 3507
3452 case DIV: 3508 case DIV:
3453 if (arg1s == 0 3509 if (arg1s == 0
3454 || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1) 3510 || ((unsigned HOST_WIDE_INT) arg0s
3511 == (unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
3455 && arg1s == -1)) 3512 && arg1s == -1))
3456 return 0; 3513 return 0;
3457 val = arg0s / arg1s; 3514 val = arg0s / arg1s;
3458 break; 3515 break;
3459 3516
3460 case MOD: 3517 case MOD:
3461 if (arg1s == 0 3518 if (arg1s == 0
3462 || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1) 3519 || ((unsigned HOST_WIDE_INT) arg0s
3520 == (unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
3463 && arg1s == -1)) 3521 && arg1s == -1))
3464 return 0; 3522 return 0;
3465 val = arg0s % arg1s; 3523 val = arg0s % arg1s;
3466 break; 3524 break;
3467 3525
3468 case UDIV: 3526 case UDIV:
3469 if (arg1 == 0 3527 if (arg1 == 0
3470 || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1) 3528 || ((unsigned HOST_WIDE_INT) arg0s
3529 == (unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
3471 && arg1s == -1)) 3530 && arg1s == -1))
3472 return 0; 3531 return 0;
3473 val = (unsigned HOST_WIDE_INT) arg0 / arg1; 3532 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
3474 break; 3533 break;
3475 3534
3476 case UMOD: 3535 case UMOD:
3477 if (arg1 == 0 3536 if (arg1 == 0
3478 || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1) 3537 || ((unsigned HOST_WIDE_INT) arg0s
3538 == (unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
3479 && arg1s == -1)) 3539 && arg1s == -1))
3480 return 0; 3540 return 0;
3481 val = (unsigned HOST_WIDE_INT) arg0 % arg1; 3541 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
3482 break; 3542 break;
3483 3543
3512 ? ((unsigned HOST_WIDE_INT) arg0) << arg1 3572 ? ((unsigned HOST_WIDE_INT) arg0) << arg1
3513 : ((unsigned HOST_WIDE_INT) arg0) >> arg1); 3573 : ((unsigned HOST_WIDE_INT) arg0) >> arg1);
3514 3574
3515 /* Sign-extend the result for arithmetic right shifts. */ 3575 /* Sign-extend the result for arithmetic right shifts. */
3516 if (code == ASHIFTRT && arg0s < 0 && arg1 > 0) 3576 if (code == ASHIFTRT && arg0s < 0 && arg1 > 0)
3517 val |= ((HOST_WIDE_INT) -1) << (width - arg1); 3577 val |= ((unsigned HOST_WIDE_INT) (-1)) << (width - arg1);
3518 break; 3578 break;
3519 3579
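The shift folding above now performs the arithmetic right shift by shifting the unsigned bit pattern and then ORing the vacated high bits back in with an unsigned all-ones mask. A worked standalone version for an 8-bit operand (editor's example, not GCC code):

#include <stdio.h>

int
main (void)
{
  unsigned int width = 8, sh = 3;
  unsigned long long arg0 = 0xe0;            /* bit pattern of -32 in 8 bits */
  int negative = (arg0 >> (width - 1)) & 1;  /* the operand's sign bit */
  unsigned long long val = arg0 >> sh;       /* logical shift: 0x1c */
  if (negative)
    val |= (unsigned long long) -1 << (width - sh);   /* put the sign bits back */
  printf ("%llx\n", val & 0xff);             /* fc, i.e. -4: the arithmetic -32 >> 3 */
  return 0;
}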
3520 case ROTATERT: 3580 case ROTATERT:
3521 if (arg1 < 0) 3581 if (arg1 < 0)
3522 return 0; 3582 return 0;
4392 4452
4393 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT, 4453 /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4394 we have to sign or zero-extend the values. */ 4454 we have to sign or zero-extend the values. */
4395 if (width != 0 && width < HOST_BITS_PER_WIDE_INT) 4455 if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4396 { 4456 {
4397 l0u &= ((HOST_WIDE_INT) 1 << width) - 1; 4457 l0u &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
4398 l1u &= ((HOST_WIDE_INT) 1 << width) - 1; 4458 l1u &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
4399 4459
4400 if (l0s & ((HOST_WIDE_INT) 1 << (width - 1))) 4460 if (l0s & ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
4401 l0s |= ((HOST_WIDE_INT) (-1) << width); 4461 l0s |= ((unsigned HOST_WIDE_INT) (-1) << width);
4402 4462
4403 if (l1s & ((HOST_WIDE_INT) 1 << (width - 1))) 4463 if (l1s & ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
4404 l1s |= ((HOST_WIDE_INT) (-1) << width); 4464 l1s |= ((unsigned HOST_WIDE_INT) (-1) << width);
4405 } 4465 }
4406 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT) 4466 if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4407 h0u = h1u = 0, h0s = HWI_SIGN_EXTEND (l0s), h1s = HWI_SIGN_EXTEND (l1s); 4467 h0u = h1u = 0, h0s = HWI_SIGN_EXTEND (l0s), h1s = HWI_SIGN_EXTEND (l1s);
4408 4468
4409 if (h0u == h1u && l0u == l1u) 4469 if (h0u == h1u && l0u == l1u)
4552 rtx inner_const = avoid_constant_pool_reference (XEXP (op0, 1)); 4612 rtx inner_const = avoid_constant_pool_reference (XEXP (op0, 1));
4553 if (CONST_INT_P (inner_const) && inner_const != const0_rtx) 4613 if (CONST_INT_P (inner_const) && inner_const != const0_rtx)
4554 { 4614 {
4555 int sign_bitnum = GET_MODE_BITSIZE (mode) - 1; 4615 int sign_bitnum = GET_MODE_BITSIZE (mode) - 1;
4556 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum 4616 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4557 && (INTVAL (inner_const) 4617 && (UINTVAL (inner_const)
4558 & ((HOST_WIDE_INT) 1 << sign_bitnum))); 4618 & ((unsigned HOST_WIDE_INT) 1
4619 << sign_bitnum)));
4559 4620
4560 switch (code) 4621 switch (code)
4561 { 4622 {
4562 case EQ: 4623 case EQ:
4563 case LEU: 4624 case LEU:
4642 simplify_ternary_operation (enum rtx_code code, enum machine_mode mode, 4703 simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
4643 enum machine_mode op0_mode, rtx op0, rtx op1, 4704 enum machine_mode op0_mode, rtx op0, rtx op1,
4644 rtx op2) 4705 rtx op2)
4645 { 4706 {
4646 unsigned int width = GET_MODE_BITSIZE (mode); 4707 unsigned int width = GET_MODE_BITSIZE (mode);
4708 bool any_change = false;
4709 rtx tem;
4647 4710
4648 /* VOIDmode means "infinite" precision. */ 4711 /* VOIDmode means "infinite" precision. */
4649 if (width == 0) 4712 if (width == 0)
4650 width = HOST_BITS_PER_WIDE_INT; 4713 width = HOST_BITS_PER_WIDE_INT;
4651 4714
4652 switch (code) 4715 switch (code)
4653 { 4716 {
4717 case FMA:
4718 /* Simplify negations around the multiplication. */
4719 /* -a * -b + c => a * b + c. */
4720 if (GET_CODE (op0) == NEG)
4721 {
4722 tem = simplify_unary_operation (NEG, mode, op1, mode);
4723 if (tem)
4724 op1 = tem, op0 = XEXP (op0, 0), any_change = true;
4725 }
4726 else if (GET_CODE (op1) == NEG)
4727 {
4728 tem = simplify_unary_operation (NEG, mode, op0, mode);
4729 if (tem)
4730 op0 = tem, op1 = XEXP (op1, 0), any_change = true;
4731 }
4732
4733 /* Canonicalize the two multiplication operands. */
4734 /* a * -b + c => -b * a + c. */
4735 if (swap_commutative_operands_p (op0, op1))
4736 tem = op0, op0 = op1, op1 = tem, any_change = true;
4737
4738 if (any_change)
4739 return gen_rtx_FMA (mode, op0, op1, op2);
4740 return NULL_RTX;
4741
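The new FMA case removes a double negation of the multiplication operands and then canonicalizes their order; the identity behind the first step is simply (-a) * (-b) + c == a * b + c. A one-line numeric check (editor's example, not GCC code):

#include <stdio.h>

int
main (void)
{
  double a = 1.5, b = -2.25, c = 10.0;
  printf ("%g %g\n", (-a) * (-b) + c, a * b + c);   /* both 6.625 */
  return 0;
}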
4654 case SIGN_EXTRACT: 4742 case SIGN_EXTRACT:
4655 case ZERO_EXTRACT: 4743 case ZERO_EXTRACT:
4656 if (CONST_INT_P (op0) 4744 if (CONST_INT_P (op0)
4657 && CONST_INT_P (op1) 4745 && CONST_INT_P (op1)
4658 && CONST_INT_P (op2) 4746 && CONST_INT_P (op2)
4659 && ((unsigned) INTVAL (op1) + (unsigned) INTVAL (op2) <= width) 4747 && ((unsigned) INTVAL (op1) + (unsigned) INTVAL (op2) <= width)
4660 && width <= (unsigned) HOST_BITS_PER_WIDE_INT) 4748 && width <= (unsigned) HOST_BITS_PER_WIDE_INT)
4661 { 4749 {
4662 /* Extracting a bit-field from a constant */ 4750 /* Extracting a bit-field from a constant */
4663 HOST_WIDE_INT val = INTVAL (op0); 4751 unsigned HOST_WIDE_INT val = UINTVAL (op0);
4664 4752
4665 if (BITS_BIG_ENDIAN) 4753 if (BITS_BIG_ENDIAN)
4666 val >>= (GET_MODE_BITSIZE (op0_mode) 4754 val >>= GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1);
4667 - INTVAL (op2) - INTVAL (op1));
4668 else 4755 else
4669 val >>= INTVAL (op2); 4756 val >>= INTVAL (op2);
4670 4757
4671 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1)) 4758 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4672 { 4759 {
4673 /* First zero-extend. */ 4760 /* First zero-extend. */
4674 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1; 4761 val &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4675 /* If desired, propagate sign bit. */ 4762 /* If desired, propagate sign bit. */
4676 if (code == SIGN_EXTRACT 4763 if (code == SIGN_EXTRACT
4677 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1)))) 4764 && (val & ((unsigned HOST_WIDE_INT) 1 << (INTVAL (op1) - 1)))
4678 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1); 4765 != 0)
4766 val |= ~ (((unsigned HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4679 } 4767 }
4680 4768
4681 /* Clear the bits that don't belong in our mode, 4769 /* Clear the bits that don't belong in our mode,
4682 unless they and our sign bit are all one. 4770 unless they and our sign bit are all one.
4683 So we get either a reasonable negative value or a reasonable 4771 So we get either a reasonable negative value or a reasonable
4684 unsigned value for this mode. */ 4772 unsigned value for this mode. */
4685 if (width < HOST_BITS_PER_WIDE_INT 4773 if (width < HOST_BITS_PER_WIDE_INT
4686 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1))) 4774 && ((val & ((unsigned HOST_WIDE_INT) (-1) << (width - 1)))
4687 != ((HOST_WIDE_INT) (-1) << (width - 1)))) 4775 != ((unsigned HOST_WIDE_INT) (-1) << (width - 1))))
4688 val &= ((HOST_WIDE_INT) 1 << width) - 1; 4776 val &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
4689 4777
4690 return gen_int_mode (val, mode); 4778 return gen_int_mode (val, mode);
4691 } 4779 }
4692 break; 4780 break;
4693 4781
5041 unsigned HOST_WIDE_INT hi = 0, lo = 0; 5129 unsigned HOST_WIDE_INT hi = 0, lo = 0;
5042 5130
5043 for (i = 0; 5131 for (i = 0;
5044 i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize; 5132 i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
5045 i += value_bit) 5133 i += value_bit)
5046 lo |= (HOST_WIDE_INT)(*vp++ & value_mask) << i; 5134 lo |= (unsigned HOST_WIDE_INT)(*vp++ & value_mask) << i;
5047 for (; i < elem_bitsize; i += value_bit) 5135 for (; i < elem_bitsize; i += value_bit)
5048 hi |= ((HOST_WIDE_INT)(*vp++ & value_mask) 5136 hi |= (unsigned HOST_WIDE_INT)(*vp++ & value_mask)
5049 << (i - HOST_BITS_PER_WIDE_INT)); 5137 << (i - HOST_BITS_PER_WIDE_INT);
5050 5138
5051 /* immed_double_const doesn't call trunc_int_for_mode. I don't 5139 /* immed_double_const doesn't call trunc_int_for_mode. I don't
5052 know why. */ 5140 know why. */
5053 if (elem_bitsize <= HOST_BITS_PER_WIDE_INT) 5141 if (elem_bitsize <= HOST_BITS_PER_WIDE_INT)
5054 elems[elem] = gen_int_mode (lo, outer_submode); 5142 elems[elem] = gen_int_mode (lo, outer_submode);
5097 f.mode = outer_submode; 5185 f.mode = outer_submode;
5098 5186
5099 for (i = 0; 5187 for (i = 0;
5100 i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize; 5188 i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
5101 i += value_bit) 5189 i += value_bit)
5102 f.data.low |= (HOST_WIDE_INT)(*vp++ & value_mask) << i; 5190 f.data.low |= (unsigned HOST_WIDE_INT)(*vp++ & value_mask) << i;
5103 for (; i < elem_bitsize; i += value_bit) 5191 for (; i < elem_bitsize; i += value_bit)
5104 f.data.high |= ((HOST_WIDE_INT)(*vp++ & value_mask) 5192 f.data.high |= ((unsigned HOST_WIDE_INT)(*vp++ & value_mask)
5105 << (i - HOST_BITS_PER_WIDE_INT)); 5193 << (i - HOST_BITS_PER_WIDE_INT));
5106 5194
5107 elems[elem] = CONST_FIXED_FROM_FIXED_VALUE (f, outer_submode); 5195 elems[elem] = CONST_FIXED_FROM_FIXED_VALUE (f, outer_submode);
5108 } 5196 }
5109 break; 5197 break;
5427 (WORDS_BIG_ENDIAN 5515 (WORDS_BIG_ENDIAN
5428 ? byte - shifted_bytes 5516 ? byte - shifted_bytes
5429 : byte + shifted_bytes)); 5517 : byte + shifted_bytes));
5430 } 5518 }
5431 5519
5520 /* If we have a lowpart SUBREG of a right shift of MEM, make a new MEM
5521 and try replacing the SUBREG and shift with it. Don't do this if
5522 the MEM has a mode-dependent address or if we would be widening it. */
5523
5524 if ((GET_CODE (op) == LSHIFTRT
5525 || GET_CODE (op) == ASHIFTRT)
5526 && MEM_P (XEXP (op, 0))
5527 && CONST_INT_P (XEXP (op, 1))
5528 && GET_MODE_SIZE (outermode) < GET_MODE_SIZE (GET_MODE (op))
5529 && (INTVAL (XEXP (op, 1)) % GET_MODE_BITSIZE (outermode)) == 0
5530 && INTVAL (XEXP (op, 1)) > 0
5531 && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (innermode)
5532 && ! mode_dependent_address_p (XEXP (XEXP (op, 0), 0))
5533 && ! MEM_VOLATILE_P (XEXP (op, 0))
5534 && byte == subreg_lowpart_offset (outermode, innermode)
5535 && (GET_MODE_SIZE (outermode) >= UNITS_PER_WORD
5536 || WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN))
5537 {
5538 int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT;
5539 return adjust_address_nv (XEXP (op, 0), outermode,
5540 (WORDS_BIG_ENDIAN
5541 ? byte - shifted_bytes
5542 : byte + shifted_bytes));
5543 }
5544
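The new hunk above turns a lowpart SUBREG of a right-shifted memory load into a narrower load at a byte offset (the shift count divided by BITS_PER_UNIT, with the direction depending on endianness). In plain C terms, on a little-endian host, the low 16 bits of a 32-bit value shifted right by 16 can be read as a 16-bit load two bytes into the object. A standalone sketch (editor's example, not GCC code; little-endian byte order is assumed):

#include <stdio.h>
#include <stdint.h>
#include <string.h>

/* Editor's sketch, not GCC code: the C-level analogue of the
   SUBREG-of-shift transformation above, on a little-endian host.  */
int
main (void)
{
  uint32_t x = 0x12345678;
  unsigned char bytes[4];
  memcpy (bytes, &x, 4);               /* the in-memory image of x */

  uint16_t via_shift = (uint16_t) (x >> 16);   /* subreg:HI of (lshiftrt:SI x 16) */

  uint16_t via_load;
  memcpy (&via_load, bytes + 2, 2);    /* narrower load at byte offset 16/8 = 2 */

  printf ("%04x %04x\n", via_shift, via_load); /* both 1234 on little endian */
  return 0;
}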
5432 return NULL_RTX; 5545 return NULL_RTX;
5433 } 5546 }
5434 5547
5435 /* Make a SUBREG operation or equivalent if it folds. */ 5548 /* Make a SUBREG operation or equivalent if it folds. */
5436 5549