comparison gcc/alias.c @ 132:d34655255c78

update gcc-8.2
author mir3636
date Thu, 25 Oct 2018 10:21:07 +0900
parents 84e7813d76e9
children 1830386684a0
comparison
equal deleted inserted replaced
130:e108057fa461 132:d34655255c78
1 /* Alias analysis for GNU C 1 /* Alias analysis for GNU C
2 Copyright (C) 1997-2017 Free Software Foundation, Inc. 2 Copyright (C) 1997-2018 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu). 3 Contributed by John Carr (jfc@mit.edu).
4 4
5 This file is part of GCC. 5 This file is part of GCC.
6 6
7 GCC is free software; you can redistribute it and/or modify it under 7 GCC is free software; you can redistribute it and/or modify it under
146 `int', `double', `float', and `struct S'. */ 146 `int', `double', `float', and `struct S'. */
147 hash_map<alias_set_hash, int> *children; 147 hash_map<alias_set_hash, int> *children;
148 }; 148 };
149 149
150 static int rtx_equal_for_memref_p (const_rtx, const_rtx); 150 static int rtx_equal_for_memref_p (const_rtx, const_rtx);
151 static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
152 static void record_set (rtx, const_rtx, void *); 151 static void record_set (rtx, const_rtx, void *);
153 static int base_alias_check (rtx, rtx, rtx, rtx, machine_mode, 152 static int base_alias_check (rtx, rtx, rtx, rtx, machine_mode,
154 machine_mode); 153 machine_mode);
155 static rtx find_base_value (rtx); 154 static rtx find_base_value (rtx);
156 static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx); 155 static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
328 || !MEM_SIZE_KNOWN_P (mem)) 327 || !MEM_SIZE_KNOWN_P (mem))
329 return true; 328 return true;
330 329
331 /* If MEM_OFFSET/MEM_SIZE get us outside of ref->offset/ref->max_size 330 /* If MEM_OFFSET/MEM_SIZE get us outside of ref->offset/ref->max_size
332 drop ref->ref. */ 331 drop ref->ref. */
333 if (MEM_OFFSET (mem) < 0 332 if (maybe_lt (MEM_OFFSET (mem), 0)
334 || (ref->max_size != -1 333 || (ref->max_size_known_p ()
335 && ((MEM_OFFSET (mem) + MEM_SIZE (mem)) * BITS_PER_UNIT 334 && maybe_gt ((MEM_OFFSET (mem) + MEM_SIZE (mem)) * BITS_PER_UNIT,
336 > ref->max_size))) 335 ref->max_size)))
337 ref->ref = NULL_TREE; 336 ref->ref = NULL_TREE;
338 337
339 /* Refine size and offset we got from analyzing MEM_EXPR by using 338 /* Refine size and offset we got from analyzing MEM_EXPR by using
340 MEM_SIZE and MEM_OFFSET. */ 339 MEM_SIZE and MEM_OFFSET. */
341 340
342 ref->offset += MEM_OFFSET (mem) * BITS_PER_UNIT; 341 ref->offset += MEM_OFFSET (mem) * BITS_PER_UNIT;
343 ref->size = MEM_SIZE (mem) * BITS_PER_UNIT; 342 ref->size = MEM_SIZE (mem) * BITS_PER_UNIT;
344 343
345 /* The MEM may extend into adjacent fields, so adjust max_size if 344 /* The MEM may extend into adjacent fields, so adjust max_size if
346 necessary. */ 345 necessary. */
347 if (ref->max_size != -1 346 if (ref->max_size_known_p ())
348 && ref->size > ref->max_size) 347 ref->max_size = upper_bound (ref->max_size, ref->size);
349 ref->max_size = ref->size; 348
350 349 /* If MEM_OFFSET and MEM_SIZE might get us outside of the base object of
351 /* If MEM_OFFSET and MEM_SIZE get us outside of the base object of
352 the MEM_EXPR punt. This happens for STRICT_ALIGNMENT targets a lot. */ 350 the MEM_EXPR punt. This happens for STRICT_ALIGNMENT targets a lot. */
353 if (MEM_EXPR (mem) != get_spill_slot_decl (false) 351 if (MEM_EXPR (mem) != get_spill_slot_decl (false)
354 && (ref->offset < 0 352 && (maybe_lt (ref->offset, 0)
355 || (DECL_P (ref->base) 353 || (DECL_P (ref->base)
356 && (DECL_SIZE (ref->base) == NULL_TREE 354 && (DECL_SIZE (ref->base) == NULL_TREE
357 || TREE_CODE (DECL_SIZE (ref->base)) != INTEGER_CST 355 || !poly_int_tree_p (DECL_SIZE (ref->base))
358 || wi::ltu_p (wi::to_offset (DECL_SIZE (ref->base)), 356 || maybe_lt (wi::to_poly_offset (DECL_SIZE (ref->base)),
359 ref->offset + ref->size))))) 357 ref->offset + ref->size)))))
360 return false; 358 return false;
361 359
362 return true; 360 return true;
363 } 361 }
364 362
1347 1345
1348 static rtx 1346 static rtx
1349 find_base_value (rtx src) 1347 find_base_value (rtx src)
1350 { 1348 {
1351 unsigned int regno; 1349 unsigned int regno;
1350 scalar_int_mode int_mode;
1352 1351
1353 #if defined (FIND_BASE_TERM) 1352 #if defined (FIND_BASE_TERM)
1354 /* Try machine-dependent ways to find the base term. */ 1353 /* Try machine-dependent ways to find the base term. */
1355 src = FIND_BASE_TERM (src); 1354 src = FIND_BASE_TERM (src);
1356 #endif 1355 #endif
1473 /* As we do not know which address space the pointer is referring to, we can 1472 /* As we do not know which address space the pointer is referring to, we can
1474 handle this only if the target does not support different pointer or 1473 handle this only if the target does not support different pointer or
1475 address modes depending on the address space. */ 1474 address modes depending on the address space. */
1476 if (!target_default_pointer_address_modes_p ()) 1475 if (!target_default_pointer_address_modes_p ())
1477 break; 1476 break;
1478 if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode)) 1477 if (!is_a <scalar_int_mode> (GET_MODE (src), &int_mode)
1478 || GET_MODE_PRECISION (int_mode) < GET_MODE_PRECISION (Pmode))
1479 break; 1479 break;
1480 /* Fall through. */ 1480 /* Fall through. */
1481 case HIGH: 1481 case HIGH:
1482 case PRE_INC: 1482 case PRE_INC:
1483 case PRE_DEC: 1483 case PRE_DEC:
1552 if (GET_CODE (set) == CLOBBER) 1552 if (GET_CODE (set) == CLOBBER)
1553 { 1553 {
1554 new_reg_base_value[regno] = 0; 1554 new_reg_base_value[regno] = 0;
1555 return; 1555 return;
1556 } 1556 }
1557 /* A CLOBBER_HIGH only wipes out the old value if the mode of the old
1558 value is greater than that of the clobber. */
1559 else if (GET_CODE (set) == CLOBBER_HIGH)
1560 {
1561 if (new_reg_base_value[regno] != 0
1562 && reg_is_clobbered_by_clobber_high (
1563 regno, GET_MODE (new_reg_base_value[regno]), XEXP (set, 0)))
1564 new_reg_base_value[regno] = 0;
1565 return;
1566 }
1567
1557 src = SET_SRC (set); 1568 src = SET_SRC (set);
1558 } 1569 }
1559 else 1570 else
1560 { 1571 {
1561 /* There's a REG_NOALIAS note against DEST. */ 1572 /* There's a REG_NOALIAS note against DEST. */
1830 case 'i': 1841 case 'i':
1831 if (XINT (x, i) != XINT (y, i)) 1842 if (XINT (x, i) != XINT (y, i))
1832 return 0; 1843 return 0;
1833 break; 1844 break;
1834 1845
1846 case 'p':
1847 if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
1848 return 0;
1849 break;
1850
1835 case 'E': 1851 case 'E':
1836 /* Two vectors must have the same length. */ 1852 /* Two vectors must have the same length. */
1837 if (XVECLEN (x, i) != XVECLEN (y, i)) 1853 if (XVECLEN (x, i) != XVECLEN (y, i))
1838 return 0; 1854 return 0;
1839 1855
1869 } 1885 }
1870 return 1; 1886 return 1;
1871 } 1887 }
1872 1888
1873 static rtx 1889 static rtx
1874 find_base_term (rtx x) 1890 find_base_term (rtx x, vec<std::pair<cselib_val *,
1891 struct elt_loc_list *> > &visited_vals)
1875 { 1892 {
1876 cselib_val *val; 1893 cselib_val *val;
1877 struct elt_loc_list *l, *f; 1894 struct elt_loc_list *l, *f;
1878 rtx ret; 1895 rtx ret;
1896 scalar_int_mode int_mode;
1879 1897
1880 #if defined (FIND_BASE_TERM) 1898 #if defined (FIND_BASE_TERM)
1881 /* Try machine-dependent ways to find the base term. */ 1899 /* Try machine-dependent ways to find the base term. */
1882 x = FIND_BASE_TERM (x); 1900 x = FIND_BASE_TERM (x);
1883 #endif 1901 #endif
1891 /* As we do not know which address space the pointer is referring to, we can 1909 /* As we do not know which address space the pointer is referring to, we can
1892 handle this only if the target does not support different pointer or 1910 handle this only if the target does not support different pointer or
1893 address modes depending on the address space. */ 1911 address modes depending on the address space. */
1894 if (!target_default_pointer_address_modes_p ()) 1912 if (!target_default_pointer_address_modes_p ())
1895 return 0; 1913 return 0;
1896 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode)) 1914 if (!is_a <scalar_int_mode> (GET_MODE (x), &int_mode)
1915 || GET_MODE_PRECISION (int_mode) < GET_MODE_PRECISION (Pmode))
1897 return 0; 1916 return 0;
1898 /* Fall through. */ 1917 /* Fall through. */
1899 case HIGH: 1918 case HIGH:
1900 case PRE_INC: 1919 case PRE_INC:
1901 case PRE_DEC: 1920 case PRE_DEC:
1902 case POST_INC: 1921 case POST_INC:
1903 case POST_DEC: 1922 case POST_DEC:
1904 case PRE_MODIFY: 1923 case PRE_MODIFY:
1905 case POST_MODIFY: 1924 case POST_MODIFY:
1906 return find_base_term (XEXP (x, 0)); 1925 return find_base_term (XEXP (x, 0), visited_vals);
1907 1926
1908 case ZERO_EXTEND: 1927 case ZERO_EXTEND:
1909 case SIGN_EXTEND: /* Used for Alpha/NT pointers */ 1928 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
1910 /* As we do not know which address space the pointer is referring to, we can 1929 /* As we do not know which address space the pointer is referring to, we can
1911 handle this only if the target does not support different pointer or 1930 handle this only if the target does not support different pointer or
1912 address modes depending on the address space. */ 1931 address modes depending on the address space. */
1913 if (!target_default_pointer_address_modes_p ()) 1932 if (!target_default_pointer_address_modes_p ())
1914 return 0; 1933 return 0;
1915 1934
1916 { 1935 {
1917 rtx temp = find_base_term (XEXP (x, 0)); 1936 rtx temp = find_base_term (XEXP (x, 0), visited_vals);
1918 1937
1919 if (temp != 0 && CONSTANT_P (temp)) 1938 if (temp != 0 && CONSTANT_P (temp))
1920 temp = convert_memory_address (Pmode, temp); 1939 temp = convert_memory_address (Pmode, temp);
1921 1940
1922 return temp; 1941 return temp;
1931 1950
1932 if (cselib_sp_based_value_p (val)) 1951 if (cselib_sp_based_value_p (val))
1933 return static_reg_base_value[STACK_POINTER_REGNUM]; 1952 return static_reg_base_value[STACK_POINTER_REGNUM];
1934 1953
1935 f = val->locs; 1954 f = val->locs;
1936 /* Temporarily reset val->locs to avoid infinite recursion. */ 1955 /* Reset val->locs to avoid infinite recursion. */
1956 if (f)
1957 visited_vals.safe_push (std::make_pair (val, f));
1937 val->locs = NULL; 1958 val->locs = NULL;
1938 1959
1939 for (l = f; l; l = l->next) 1960 for (l = f; l; l = l->next)
1940 if (GET_CODE (l->loc) == VALUE 1961 if (GET_CODE (l->loc) == VALUE
1941 && CSELIB_VAL_PTR (l->loc)->locs 1962 && CSELIB_VAL_PTR (l->loc)->locs
1942 && !CSELIB_VAL_PTR (l->loc)->locs->next 1963 && !CSELIB_VAL_PTR (l->loc)->locs->next
1943 && CSELIB_VAL_PTR (l->loc)->locs->loc == x) 1964 && CSELIB_VAL_PTR (l->loc)->locs->loc == x)
1944 continue; 1965 continue;
1945 else if ((ret = find_base_term (l->loc)) != 0) 1966 else if ((ret = find_base_term (l->loc, visited_vals)) != 0)
1946 break; 1967 break;
1947 1968
1948 val->locs = f;
1949 return ret; 1969 return ret;
1950 1970
1951 case LO_SUM: 1971 case LO_SUM:
1952 /* The standard form is (lo_sum reg sym) so look only at the 1972 /* The standard form is (lo_sum reg sym) so look only at the
1953 second operand. */ 1973 second operand. */
1954 return find_base_term (XEXP (x, 1)); 1974 return find_base_term (XEXP (x, 1), visited_vals);
1955 1975
1956 case CONST: 1976 case CONST:
1957 x = XEXP (x, 0); 1977 x = XEXP (x, 0);
1958 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS) 1978 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
1959 return 0; 1979 return 0;
1975 tests can certainly be added. For example, if one of the operands 1995 tests can certainly be added. For example, if one of the operands
1976 is a shift or multiply, then it must be the index register and the 1996 is a shift or multiply, then it must be the index register and the
1977 other operand is the base register. */ 1997 other operand is the base register. */
1978 1998
1979 if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2)) 1999 if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
1980 return find_base_term (tmp2); 2000 return find_base_term (tmp2, visited_vals);
1981 2001
1982 /* If either operand is known to be a pointer, then prefer it 2002 /* If either operand is known to be a pointer, then prefer it
1983 to determine the base term. */ 2003 to determine the base term. */
1984 if (REG_P (tmp1) && REG_POINTER (tmp1)) 2004 if (REG_P (tmp1) && REG_POINTER (tmp1))
1985 ; 2005 ;
1992 2012
1993 /* Go ahead and find the base term for both operands. If either base 2013 /* Go ahead and find the base term for both operands. If either base
1994 term is from a pointer or is a named object or a special address 2014 term is from a pointer or is a named object or a special address
1995 (like an argument or stack reference), then use it for the 2015 (like an argument or stack reference), then use it for the
1996 base term. */ 2016 base term. */
1997 rtx base = find_base_term (tmp1); 2017 rtx base = find_base_term (tmp1, visited_vals);
1998 if (base != NULL_RTX 2018 if (base != NULL_RTX
1999 && ((REG_P (tmp1) && REG_POINTER (tmp1)) 2019 && ((REG_P (tmp1) && REG_POINTER (tmp1))
2000 || known_base_value_p (base))) 2020 || known_base_value_p (base)))
2001 return base; 2021 return base;
2002 base = find_base_term (tmp2); 2022 base = find_base_term (tmp2, visited_vals);
2003 if (base != NULL_RTX 2023 if (base != NULL_RTX
2004 && ((REG_P (tmp2) && REG_POINTER (tmp2)) 2024 && ((REG_P (tmp2) && REG_POINTER (tmp2))
2005 || known_base_value_p (base))) 2025 || known_base_value_p (base)))
2006 return base; 2026 return base;
2007 2027
2011 return 0; 2031 return 0;
2012 } 2032 }
2013 2033
2014 case AND: 2034 case AND:
2015 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0) 2035 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
2016 return find_base_term (XEXP (x, 0)); 2036 return find_base_term (XEXP (x, 0), visited_vals);
2017 return 0; 2037 return 0;
2018 2038
2019 case SYMBOL_REF: 2039 case SYMBOL_REF:
2020 case LABEL_REF: 2040 case LABEL_REF:
2021 return x; 2041 return x;
2022 2042
2023 default: 2043 default:
2024 return 0; 2044 return 0;
2025 } 2045 }
2046 }
2047
2048 /* Wrapper around the worker above which removes locs from visited VALUEs
2049 to avoid visiting them multiple times. We unwind the changes here. */
2050
2051 static rtx
2052 find_base_term (rtx x)
2053 {
2054 auto_vec<std::pair<cselib_val *, struct elt_loc_list *>, 32> visited_vals;
2055 rtx res = find_base_term (x, visited_vals);
2056 for (unsigned i = 0; i < visited_vals.length (); ++i)
2057 visited_vals[i].first->locs = visited_vals[i].second;
2058 return res;
2026 } 2059 }
2027 2060
2028 /* Return true if accesses to address X may alias accesses based 2061 /* Return true if accesses to address X may alias accesses based
2029 on the stack pointer. */ 2062 on the stack pointer. */
2030 2063
2238 && CONST_SCALAR_INT_P (XEXP (x, 1))) 2271 && CONST_SCALAR_INT_P (XEXP (x, 1)))
2239 { 2272 {
2240 rtx op0 = get_addr (XEXP (x, 0)); 2273 rtx op0 = get_addr (XEXP (x, 0));
2241 if (op0 != XEXP (x, 0)) 2274 if (op0 != XEXP (x, 0))
2242 { 2275 {
2276 poly_int64 c;
2243 if (GET_CODE (x) == PLUS 2277 if (GET_CODE (x) == PLUS
2244 && GET_CODE (XEXP (x, 1)) == CONST_INT) 2278 && poly_int_rtx_p (XEXP (x, 1), &c))
2245 return plus_constant (GET_MODE (x), op0, INTVAL (XEXP (x, 1))); 2279 return plus_constant (GET_MODE (x), op0, c);
2246 return simplify_gen_binary (GET_CODE (x), GET_MODE (x), 2280 return simplify_gen_binary (GET_CODE (x), GET_MODE (x),
2247 op0, XEXP (x, 1)); 2281 op0, XEXP (x, 1));
2248 } 2282 }
2249 } 2283 }
2250 return x; 2284 return x;
2287 /* Return the address of the (N_REFS + 1)th memory reference to ADDR 2321 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
2288 where SIZE is the size in bytes of the memory reference. If ADDR 2322 where SIZE is the size in bytes of the memory reference. If ADDR
2289 is not modified by the memory reference then ADDR is returned. */ 2323 is not modified by the memory reference then ADDR is returned. */
2290 2324
2291 static rtx 2325 static rtx
2292 addr_side_effect_eval (rtx addr, int size, int n_refs) 2326 addr_side_effect_eval (rtx addr, poly_int64 size, int n_refs)
2293 { 2327 {
2294 int offset = 0; 2328 poly_int64 offset = 0;
2295 2329
2296 switch (GET_CODE (addr)) 2330 switch (GET_CODE (addr))
2297 { 2331 {
2298 case PRE_INC: 2332 case PRE_INC:
2299 offset = (n_refs + 1) * size; 2333 offset = (n_refs + 1) * size;
2310 2344
2311 default: 2345 default:
2312 return addr; 2346 return addr;
2313 } 2347 }
2314 2348
2315 if (offset) 2349 addr = plus_constant (GET_MODE (addr), XEXP (addr, 0), offset);
2316 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
2317 gen_int_mode (offset, GET_MODE (addr)));
2318 else
2319 addr = XEXP (addr, 0);
2320 addr = canon_rtx (addr); 2350 addr = canon_rtx (addr);
2321 2351
2322 return addr; 2352 return addr;
2323 } 2353 }
2324 2354
2326 Y sized at YSIZE bytes, starting C bytes after X, may overlap. If 2356 Y sized at YSIZE bytes, starting C bytes after X, may overlap. If
2327 any of the sizes is zero, assume an overlap, otherwise use the 2357 any of the sizes is zero, assume an overlap, otherwise use the
2328 absolute value of the sizes as the actual sizes. */ 2358 absolute value of the sizes as the actual sizes. */
2329 2359
2330 static inline bool 2360 static inline bool
2331 offset_overlap_p (HOST_WIDE_INT c, int xsize, int ysize) 2361 offset_overlap_p (poly_int64 c, poly_int64 xsize, poly_int64 ysize)
2332 { 2362 {
2333 return (xsize == 0 || ysize == 0 2363 if (known_eq (xsize, 0) || known_eq (ysize, 0))
2334 || (c >= 0 2364 return true;
2335 ? (abs (xsize) > c) 2365
2336 : (abs (ysize) > -c))); 2366 if (maybe_ge (c, 0))
2367 return maybe_gt (maybe_lt (xsize, 0) ? -xsize : xsize, c);
2368 else
2369 return maybe_gt (maybe_lt (ysize, 0) ? -ysize : ysize, -c);
2337 } 2370 }
2338 2371
2339 /* Return one if X and Y (memory addresses) reference the 2372 /* Return one if X and Y (memory addresses) reference the
2340 same location in memory or if the references overlap. 2373 same location in memory or if the references overlap.
2341 Return zero if they do not overlap, else return 2374 Return zero if they do not overlap, else return
2361 ??? Contrary to the tree alias oracle this does not return 2394 ??? Contrary to the tree alias oracle this does not return
2362 one for X + non-constant and Y + non-constant when X and Y are equal. 2395 one for X + non-constant and Y + non-constant when X and Y are equal.
2363 If that is fixed the TBAA hack for union type-punning can be removed. */ 2396 If that is fixed the TBAA hack for union type-punning can be removed. */
2364 2397
2365 static int 2398 static int
2366 memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c) 2399 memrefs_conflict_p (poly_int64 xsize, rtx x, poly_int64 ysize, rtx y,
2400 poly_int64 c)
2367 { 2401 {
2368 if (GET_CODE (x) == VALUE) 2402 if (GET_CODE (x) == VALUE)
2369 { 2403 {
2370 if (REG_P (y)) 2404 if (REG_P (y))
2371 { 2405 {
2406 if (GET_CODE (x) == HIGH) 2440 if (GET_CODE (x) == HIGH)
2407 x = XEXP (x, 0); 2441 x = XEXP (x, 0);
2408 else if (GET_CODE (x) == LO_SUM) 2442 else if (GET_CODE (x) == LO_SUM)
2409 x = XEXP (x, 1); 2443 x = XEXP (x, 1);
2410 else 2444 else
2411 x = addr_side_effect_eval (x, abs (xsize), 0); 2445 x = addr_side_effect_eval (x, maybe_lt (xsize, 0) ? -xsize : xsize, 0);
2412 if (GET_CODE (y) == HIGH) 2446 if (GET_CODE (y) == HIGH)
2413 y = XEXP (y, 0); 2447 y = XEXP (y, 0);
2414 else if (GET_CODE (y) == LO_SUM) 2448 else if (GET_CODE (y) == LO_SUM)
2415 y = XEXP (y, 1); 2449 y = XEXP (y, 1);
2416 else 2450 else
2417 y = addr_side_effect_eval (y, abs (ysize), 0); 2451 y = addr_side_effect_eval (y, maybe_lt (ysize, 0) ? -ysize : ysize, 0);
2418 2452
2419 if (GET_CODE (x) == SYMBOL_REF && GET_CODE (y) == SYMBOL_REF) 2453 if (GET_CODE (x) == SYMBOL_REF && GET_CODE (y) == SYMBOL_REF)
2420 { 2454 {
2421 int cmp = compare_base_symbol_refs (x,y); 2455 int cmp = compare_base_symbol_refs (x,y);
2422 2456
2425 return offset_overlap_p (c, xsize, ysize); 2459 return offset_overlap_p (c, xsize, ysize);
2426 /* Assume a potential overlap for symbolic addresses that went 2460 /* Assume a potential overlap for symbolic addresses that went
2427 through alignment adjustments (i.e., that have negative 2461 through alignment adjustments (i.e., that have negative
2428 sizes), because we can't know how far they are from each 2462 sizes), because we can't know how far they are from each
2429 other. */ 2463 other. */
2430 if (xsize < 0 || ysize < 0) 2464 if (maybe_lt (xsize, 0) || maybe_lt (ysize, 0))
2431 return -1; 2465 return -1;
2432 /* If decls are different or we know by offsets that there is no overlap, 2466 /* If decls are different or we know by offsets that there is no overlap,
2433 we win. */ 2467 we win. */
2434 if (!cmp || !offset_overlap_p (c, xsize, ysize)) 2468 if (!cmp || !offset_overlap_p (c, xsize, ysize))
2435 return 0; 2469 return 0;
2456 if (x0 == y) 2490 if (x0 == y)
2457 return memrefs_conflict_p (xsize, x1, ysize, const0_rtx, c); 2491 return memrefs_conflict_p (xsize, x1, ysize, const0_rtx, c);
2458 else if (x1 == y) 2492 else if (x1 == y)
2459 return memrefs_conflict_p (xsize, x0, ysize, const0_rtx, c); 2493 return memrefs_conflict_p (xsize, x0, ysize, const0_rtx, c);
2460 2494
2495 poly_int64 cx1, cy1;
2461 if (GET_CODE (y) == PLUS) 2496 if (GET_CODE (y) == PLUS)
2462 { 2497 {
2463 /* The fact that Y is canonicalized means that this 2498 /* The fact that Y is canonicalized means that this
2464 PLUS rtx is canonicalized. */ 2499 PLUS rtx is canonicalized. */
2465 rtx y0 = XEXP (y, 0); 2500 rtx y0 = XEXP (y, 0);
2472 2507
2473 if (rtx_equal_for_memref_p (x1, y1)) 2508 if (rtx_equal_for_memref_p (x1, y1))
2474 return memrefs_conflict_p (xsize, x0, ysize, y0, c); 2509 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
2475 if (rtx_equal_for_memref_p (x0, y0)) 2510 if (rtx_equal_for_memref_p (x0, y0))
2476 return memrefs_conflict_p (xsize, x1, ysize, y1, c); 2511 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
2477 if (CONST_INT_P (x1)) 2512 if (poly_int_rtx_p (x1, &cx1))
2478 { 2513 {
2479 if (CONST_INT_P (y1)) 2514 if (poly_int_rtx_p (y1, &cy1))
2480 return memrefs_conflict_p (xsize, x0, ysize, y0, 2515 return memrefs_conflict_p (xsize, x0, ysize, y0,
2481 c - INTVAL (x1) + INTVAL (y1)); 2516 c - cx1 + cy1);
2482 else 2517 else
2483 return memrefs_conflict_p (xsize, x0, ysize, y, 2518 return memrefs_conflict_p (xsize, x0, ysize, y, c - cx1);
2484 c - INTVAL (x1));
2485 } 2519 }
2486 else if (CONST_INT_P (y1)) 2520 else if (poly_int_rtx_p (y1, &cy1))
2487 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1)); 2521 return memrefs_conflict_p (xsize, x, ysize, y0, c + cy1);
2488 2522
2489 return -1; 2523 return -1;
2490 } 2524 }
2491 else if (CONST_INT_P (x1)) 2525 else if (poly_int_rtx_p (x1, &cx1))
2492 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1)); 2526 return memrefs_conflict_p (xsize, x0, ysize, y, c - cx1);
2493 } 2527 }
2494 else if (GET_CODE (y) == PLUS) 2528 else if (GET_CODE (y) == PLUS)
2495 { 2529 {
2496 /* The fact that Y is canonicalized means that this 2530 /* The fact that Y is canonicalized means that this
2497 PLUS rtx is canonicalized. */ 2531 PLUS rtx is canonicalized. */
2501 if (x == y0) 2535 if (x == y0)
2502 return memrefs_conflict_p (xsize, const0_rtx, ysize, y1, c); 2536 return memrefs_conflict_p (xsize, const0_rtx, ysize, y1, c);
2503 if (x == y1) 2537 if (x == y1)
2504 return memrefs_conflict_p (xsize, const0_rtx, ysize, y0, c); 2538 return memrefs_conflict_p (xsize, const0_rtx, ysize, y0, c);
2505 2539
2506 if (CONST_INT_P (y1)) 2540 poly_int64 cy1;
2507 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1)); 2541 if (poly_int_rtx_p (y1, &cy1))
2542 return memrefs_conflict_p (xsize, x, ysize, y0, c + cy1);
2508 else 2543 else
2509 return -1; 2544 return -1;
2510 } 2545 }
2511 2546
2512 if (GET_CODE (x) == GET_CODE (y)) 2547 if (GET_CODE (x) == GET_CODE (y))
2526 y0 = canon_rtx (XEXP (y, 0)); 2561 y0 = canon_rtx (XEXP (y, 0));
2527 if (rtx_equal_for_memref_p (x0, y0)) 2562 if (rtx_equal_for_memref_p (x0, y0))
2528 return offset_overlap_p (c, xsize, ysize); 2563 return offset_overlap_p (c, xsize, ysize);
2529 2564
2530 /* Can't properly adjust our sizes. */ 2565 /* Can't properly adjust our sizes. */
2531 if (!CONST_INT_P (x1)) 2566 poly_int64 c1;
2567 if (!poly_int_rtx_p (x1, &c1)
2568 || !can_div_trunc_p (xsize, c1, &xsize)
2569 || !can_div_trunc_p (ysize, c1, &ysize)
2570 || !can_div_trunc_p (c, c1, &c))
2532 return -1; 2571 return -1;
2533 xsize /= INTVAL (x1);
2534 ysize /= INTVAL (x1);
2535 c /= INTVAL (x1);
2536 return memrefs_conflict_p (xsize, x0, ysize, y0, c); 2572 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
2537 } 2573 }
2538 2574
2539 default: 2575 default:
2540 break; 2576 break;
2551 { 2587 {
2552 HOST_WIDE_INT sc = INTVAL (XEXP (x, 1)); 2588 HOST_WIDE_INT sc = INTVAL (XEXP (x, 1));
2553 unsigned HOST_WIDE_INT uc = sc; 2589 unsigned HOST_WIDE_INT uc = sc;
2554 if (sc < 0 && pow2_or_zerop (-uc)) 2590 if (sc < 0 && pow2_or_zerop (-uc))
2555 { 2591 {
2556 if (xsize > 0) 2592 if (maybe_gt (xsize, 0))
2557 xsize = -xsize; 2593 xsize = -xsize;
2558 if (xsize) 2594 if (maybe_ne (xsize, 0))
2559 xsize += sc + 1; 2595 xsize += sc + 1;
2560 c -= sc + 1; 2596 c -= sc + 1;
2561 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), 2597 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
2562 ysize, y, c); 2598 ysize, y, c);
2563 } 2599 }
2566 { 2602 {
2567 HOST_WIDE_INT sc = INTVAL (XEXP (y, 1)); 2603 HOST_WIDE_INT sc = INTVAL (XEXP (y, 1));
2568 unsigned HOST_WIDE_INT uc = sc; 2604 unsigned HOST_WIDE_INT uc = sc;
2569 if (sc < 0 && pow2_or_zerop (-uc)) 2605 if (sc < 0 && pow2_or_zerop (-uc))
2570 { 2606 {
2571 if (ysize > 0) 2607 if (maybe_gt (ysize, 0))
2572 ysize = -ysize; 2608 ysize = -ysize;
2573 if (ysize) 2609 if (maybe_ne (ysize, 0))
2574 ysize += sc + 1; 2610 ysize += sc + 1;
2575 c += sc + 1; 2611 c += sc + 1;
2576 return memrefs_conflict_p (xsize, x, 2612 return memrefs_conflict_p (xsize, x,
2577 ysize, canon_rtx (XEXP (y, 0)), c); 2613 ysize, canon_rtx (XEXP (y, 0)), c);
2578 } 2614 }
2579 } 2615 }
2580 2616
2581 if (CONSTANT_P (x)) 2617 if (CONSTANT_P (x))
2582 { 2618 {
2583 if (CONST_INT_P (x) && CONST_INT_P (y)) 2619 poly_int64 cx, cy;
2620 if (poly_int_rtx_p (x, &cx) && poly_int_rtx_p (y, &cy))
2584 { 2621 {
2585 c += (INTVAL (y) - INTVAL (x)); 2622 c += cy - cx;
2586 return offset_overlap_p (c, xsize, ysize); 2623 return offset_overlap_p (c, xsize, ysize);
2587 } 2624 }
2588 2625
2589 if (GET_CODE (x) == CONST) 2626 if (GET_CODE (x) == CONST)
2590 { 2627 {
2602 /* Assume a potential overlap for symbolic addresses that went 2639 /* Assume a potential overlap for symbolic addresses that went
2603 through alignment adjustments (i.e., that have negative 2640 through alignment adjustments (i.e., that have negative
2604 sizes), because we can't know how far they are from each 2641 sizes), because we can't know how far they are from each
2605 other. */ 2642 other. */
2606 if (CONSTANT_P (y)) 2643 if (CONSTANT_P (y))
2607 return (xsize < 0 || ysize < 0 || offset_overlap_p (c, xsize, ysize)); 2644 return (maybe_lt (xsize, 0)
2645 || maybe_lt (ysize, 0)
2646 || offset_overlap_p (c, xsize, ysize));
2608 2647
2609 return -1; 2648 return -1;
2610 } 2649 }
2611 2650
2612 return -1; 2651 return -1;
2662 for the offset of the field reference. *KNOWN_P says whether the 2701 for the offset of the field reference. *KNOWN_P says whether the
2663 offset is known. */ 2702 offset is known. */
2664 2703
2665 static void 2704 static void
2666 adjust_offset_for_component_ref (tree x, bool *known_p, 2705 adjust_offset_for_component_ref (tree x, bool *known_p,
2667 HOST_WIDE_INT *offset) 2706 poly_int64 *offset)
2668 { 2707 {
2669 if (!*known_p) 2708 if (!*known_p)
2670 return; 2709 return;
2671 do 2710 do
2672 { 2711 {
2673 tree xoffset = component_ref_field_offset (x); 2712 tree xoffset = component_ref_field_offset (x);
2674 tree field = TREE_OPERAND (x, 1); 2713 tree field = TREE_OPERAND (x, 1);
2675 if (TREE_CODE (xoffset) != INTEGER_CST) 2714 if (!poly_int_tree_p (xoffset))
2676 { 2715 {
2677 *known_p = false; 2716 *known_p = false;
2678 return; 2717 return;
2679 } 2718 }
2680 2719
2681 offset_int woffset 2720 poly_offset_int woffset
2682 = (wi::to_offset (xoffset) 2721 = (wi::to_poly_offset (xoffset)
2683 + (wi::to_offset (DECL_FIELD_BIT_OFFSET (field)) 2722 + (wi::to_offset (DECL_FIELD_BIT_OFFSET (field))
2684 >> LOG2_BITS_PER_UNIT)); 2723 >> LOG2_BITS_PER_UNIT)
2685 if (!wi::fits_uhwi_p (woffset)) 2724 + *offset);
2725 if (!woffset.to_shwi (offset))
2686 { 2726 {
2687 *known_p = false; 2727 *known_p = false;
2688 return; 2728 return;
2689 } 2729 }
2690 *offset += woffset.to_uhwi ();
2691 2730
2692 x = TREE_OPERAND (x, 0); 2731 x = TREE_OPERAND (x, 0);
2693 } 2732 }
2694 while (x && TREE_CODE (x) == COMPONENT_REF); 2733 while (x && TREE_CODE (x) == COMPONENT_REF);
2695 } 2734 }
2703 { 2742 {
2704 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y); 2743 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
2705 rtx rtlx, rtly; 2744 rtx rtlx, rtly;
2706 rtx basex, basey; 2745 rtx basex, basey;
2707 bool moffsetx_known_p, moffsety_known_p; 2746 bool moffsetx_known_p, moffsety_known_p;
2708 HOST_WIDE_INT moffsetx = 0, moffsety = 0; 2747 poly_int64 moffsetx = 0, moffsety = 0;
2709 HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey; 2748 poly_int64 offsetx = 0, offsety = 0, sizex, sizey;
2710 2749
2711 /* Unless both have exprs, we can't tell anything. */ 2750 /* Unless both have exprs, we can't tell anything. */
2712 if (exprx == 0 || expry == 0) 2751 if (exprx == 0 || expry == 0)
2713 return 0; 2752 return 0;
2714 2753
2806 /* Get the base and offsets of both decls. If either is a register, we 2845 /* Get the base and offsets of both decls. If either is a register, we
2807 know both are and are the same, so use that as the base. The only 2846 know both are and are the same, so use that as the base. The only
2808 we can avoid overlap is if we can deduce that they are nonoverlapping 2847 we can avoid overlap is if we can deduce that they are nonoverlapping
2809 pieces of that decl, which is very rare. */ 2848 pieces of that decl, which is very rare. */
2810 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx; 2849 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
2811 if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1))) 2850 basex = strip_offset_and_add (basex, &offsetx);
2812 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
2813 2851
2814 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly; 2852 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
2815 if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1))) 2853 basey = strip_offset_and_add (basey, &offsety);
2816 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
2817 2854
2818 /* If the bases are different, we know they do not overlap if both 2855 /* If the bases are different, we know they do not overlap if both
2819 are constants or if one is a constant and the other a pointer into the 2856 are constants or if one is a constant and the other a pointer into the
2820 stack frame. Otherwise a different base means we can't tell if they 2857 stack frame. Otherwise a different base means we can't tell if they
2821 overlap or not. */ 2858 overlap or not. */
2832 2869
2833 /* Offset based disambiguation is OK even if we do not know that the 2870 /* Offset based disambiguation is OK even if we do not know that the
2834 declarations are necessarily different 2871 declarations are necessarily different
2835 (i.e. compare_base_decls (exprx, expry) == -1) */ 2872 (i.e. compare_base_decls (exprx, expry) == -1) */
2836 2873
2837 sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx)) 2874 sizex = (!MEM_P (rtlx) ? poly_int64 (GET_MODE_SIZE (GET_MODE (rtlx)))
2838 : MEM_SIZE_KNOWN_P (rtlx) ? MEM_SIZE (rtlx) 2875 : MEM_SIZE_KNOWN_P (rtlx) ? MEM_SIZE (rtlx)
2839 : -1); 2876 : -1);
2840 sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly)) 2877 sizey = (!MEM_P (rtly) ? poly_int64 (GET_MODE_SIZE (GET_MODE (rtly)))
2841 : MEM_SIZE_KNOWN_P (rtly) ? MEM_SIZE (rtly) 2878 : MEM_SIZE_KNOWN_P (rtly) ? MEM_SIZE (rtly)
2842 : -1); 2879 : -1);
2843 2880
2844 /* If we have an offset for either memref, it can update the values computed 2881 /* If we have an offset for either memref, it can update the values computed
2845 above. */ 2882 above. */
2854 if (MEM_SIZE_KNOWN_P (x) && moffsetx_known_p) 2891 if (MEM_SIZE_KNOWN_P (x) && moffsetx_known_p)
2855 sizex = MEM_SIZE (x); 2892 sizex = MEM_SIZE (x);
2856 if (MEM_SIZE_KNOWN_P (y) && moffsety_known_p) 2893 if (MEM_SIZE_KNOWN_P (y) && moffsety_known_p)
2857 sizey = MEM_SIZE (y); 2894 sizey = MEM_SIZE (y);
2858 2895
2859 /* Put the values of the memref with the lower offset in X's values. */ 2896 return !ranges_maybe_overlap_p (offsetx, sizex, offsety, sizey);
2860 if (offsetx > offsety)
2861 {
2862 std::swap (offsetx, offsety);
2863 std::swap (sizex, sizey);
2864 }
2865
2866 /* If we don't know the size of the lower-offset value, we can't tell
2867 if they conflict. Otherwise, we do the test. */
2868 return sizex >= 0 && offsety >= offsetx + sizex;
2869 } 2897 }
2870 2898
2871 /* Helper for true_dependence and canon_true_dependence. 2899 /* Helper for true_dependence and canon_true_dependence.
2872 Checks for true dependence: X is read after store in MEM takes place. 2900 Checks for true dependence: X is read after store in MEM takes place.
2873 2901
2997 rtx true_mem_addr, true_x_addr; 3025 rtx true_mem_addr, true_x_addr;
2998 rtx base; 3026 rtx base;
2999 int ret; 3027 int ret;
3000 3028
3001 gcc_checking_assert (x_canonicalized 3029 gcc_checking_assert (x_canonicalized
3002 ? (x_addr != NULL_RTX && x_mode != VOIDmode) 3030 ? (x_addr != NULL_RTX
3031 && (x_mode != VOIDmode || GET_MODE (x) == VOIDmode))
3003 : (x_addr == NULL_RTX && x_mode == VOIDmode)); 3032 : (x_addr == NULL_RTX && x_mode == VOIDmode));
3004 3033
3005 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem)) 3034 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
3006 return 1; 3035 return 1;
3007 3036
3189 numbers, so translate if necessary due to register windows. */ 3218 numbers, so translate if necessary due to register windows. */
3190 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i)) 3219 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
3191 && targetm.hard_regno_mode_ok (i, Pmode)) 3220 && targetm.hard_regno_mode_ok (i, Pmode))
3192 static_reg_base_value[i] = arg_base_value; 3221 static_reg_base_value[i] = arg_base_value;
3193 3222
3223 /* RTL code is required to be consistent about whether it uses the
3224 stack pointer, the frame pointer or the argument pointer to
3225 access a given area of the frame. We can therefore use the
3226 base address to distinguish between the different areas. */
3194 static_reg_base_value[STACK_POINTER_REGNUM] 3227 static_reg_base_value[STACK_POINTER_REGNUM]
3195 = unique_base_value (UNIQUE_BASE_VALUE_SP); 3228 = unique_base_value (UNIQUE_BASE_VALUE_SP);
3196 static_reg_base_value[ARG_POINTER_REGNUM] 3229 static_reg_base_value[ARG_POINTER_REGNUM]
3197 = unique_base_value (UNIQUE_BASE_VALUE_ARGP); 3230 = unique_base_value (UNIQUE_BASE_VALUE_ARGP);
3198 static_reg_base_value[FRAME_POINTER_REGNUM] 3231 static_reg_base_value[FRAME_POINTER_REGNUM]
3199 = unique_base_value (UNIQUE_BASE_VALUE_FP); 3232 = unique_base_value (UNIQUE_BASE_VALUE_FP);
3233
3234 /* The above rules extend post-reload, with eliminations applying
3235 consistently to each of the three pointers. Cope with cases in
3236 which the frame pointer is eliminated to the hard frame pointer
3237 rather than the stack pointer. */
3200 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER) 3238 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
3201 static_reg_base_value[HARD_FRAME_POINTER_REGNUM] 3239 static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
3202 = unique_base_value (UNIQUE_BASE_VALUE_HFP); 3240 = unique_base_value (UNIQUE_BASE_VALUE_HFP);
3203 } 3241 }
3204 3242
3230 memory_modified = false; 3268 memory_modified = false;
3231 note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX(mem)); 3269 note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX(mem));
3232 return memory_modified; 3270 return memory_modified;
3233 } 3271 }
3234 3272
3235 /* Return TRUE if the destination of a set is rtx identical to
3236 ITEM. */
3237 static inline bool
3238 set_dest_equal_p (const_rtx set, const_rtx item)
3239 {
3240 rtx dest = SET_DEST (set);
3241 return rtx_equal_p (dest, item);
3242 }
3243
3244 /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE 3273 /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
3245 array. */ 3274 array. */
3246 3275
3247 void 3276 void
3248 init_alias_analysis (void) 3277 init_alias_analysis (void)
3327 /* Wipe the reg_seen array clean. */ 3356 /* Wipe the reg_seen array clean. */
3328 bitmap_clear (reg_seen); 3357 bitmap_clear (reg_seen);
3329 3358
3330 /* Initialize the alias information for this pass. */ 3359 /* Initialize the alias information for this pass. */
3331 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 3360 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3332 if (static_reg_base_value[i]) 3361 if (static_reg_base_value[i]
3362 /* Don't treat the hard frame pointer as special if we
3363 eliminated the frame pointer to the stack pointer instead. */
3364 && !(i == HARD_FRAME_POINTER_REGNUM
3365 && reload_completed
3366 && !frame_pointer_needed
3367 && targetm.can_eliminate (FRAME_POINTER_REGNUM,
3368 STACK_POINTER_REGNUM)))
3333 { 3369 {
3334 new_reg_base_value[i] = static_reg_base_value[i]; 3370 new_reg_base_value[i] = static_reg_base_value[i];
3335 bitmap_set_bit (reg_seen, i); 3371 bitmap_set_bit (reg_seen, i);
3336 } 3372 }
3337 3373
3373 note = find_reg_equal_equiv_note (insn); 3409 note = find_reg_equal_equiv_note (insn);
3374 if (note && REG_NOTE_KIND (note) == REG_EQUAL 3410 if (note && REG_NOTE_KIND (note) == REG_EQUAL
3375 && DF_REG_DEF_COUNT (regno) != 1) 3411 && DF_REG_DEF_COUNT (regno) != 1)
3376 note = NULL_RTX; 3412 note = NULL_RTX;
3377 3413
3414 poly_int64 offset;
3378 if (note != NULL_RTX 3415 if (note != NULL_RTX
3379 && GET_CODE (XEXP (note, 0)) != EXPR_LIST 3416 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
3380 && ! rtx_varies_p (XEXP (note, 0), 1) 3417 && ! rtx_varies_p (XEXP (note, 0), 1)
3381 && ! reg_overlap_mentioned_p (SET_DEST (set), 3418 && ! reg_overlap_mentioned_p (SET_DEST (set),
3382 XEXP (note, 0))) 3419 XEXP (note, 0)))
3387 } 3424 }
3388 else if (DF_REG_DEF_COUNT (regno) == 1 3425 else if (DF_REG_DEF_COUNT (regno) == 1
3389 && GET_CODE (src) == PLUS 3426 && GET_CODE (src) == PLUS
3390 && REG_P (XEXP (src, 0)) 3427 && REG_P (XEXP (src, 0))
3391 && (t = get_reg_known_value (REGNO (XEXP (src, 0)))) 3428 && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
3392 && CONST_INT_P (XEXP (src, 1))) 3429 && poly_int_rtx_p (XEXP (src, 1), &offset))
3393 { 3430 {
3394 t = plus_constant (GET_MODE (src), t, 3431 t = plus_constant (GET_MODE (src), t, offset);
3395 INTVAL (XEXP (src, 1)));
3396 set_reg_known_value (regno, t); 3432 set_reg_known_value (regno, t);
3397 set_reg_known_equiv_p (regno, false); 3433 set_reg_known_equiv_p (regno, false);
3398 } 3434 }
3399 else if (DF_REG_DEF_COUNT (regno) == 1 3435 else if (DF_REG_DEF_COUNT (regno) == 1
3400 && ! rtx_varies_p (src, 1)) 3436 && ! rtx_varies_p (src, 1))